[ 500.515961] env[63297]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=63297) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 500.516370] env[63297]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=63297) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 500.516370] env[63297]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=63297) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 500.516694] env[63297]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 500.613052] env[63297]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=63297) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 500.623566] env[63297]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=63297) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 501.222338] env[63297]: INFO nova.virt.driver [None req-ad0385ef-03ad-42b3-aa26-97d0fc73e1f3 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 501.292715] env[63297]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 501.292876] env[63297]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 501.292979] env[63297]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=63297) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 504.411349] env[63297]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-07b7ba80-a485-4d42-b29c-1701cc4dd0a8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.428708] env[63297]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=63297) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 504.428975] env[63297]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-1c7e9600-3fc6-4b48-a1af-830927ce7cdb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.453337] env[63297]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 03276.
[ 504.453574] env[63297]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.161s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 504.454305] env[63297]: INFO nova.virt.vmwareapi.driver [None req-ad0385ef-03ad-42b3-aa26-97d0fc73e1f3 None None] VMware vCenter version: 7.0.3
[ 504.457908] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca41b30a-6a0e-47c8-93d3-5228e6ef2296 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.476135] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ac9e4e-319c-4c61-bcb5-fb23ca06adfb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.482913] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83750899-f692-4996-ad8d-7ba95e184035 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.490030] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf716a55-6881-4930-80fe-9861160f5437 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.503670] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b2c944-b49e-4825-b294-6a5eeab5bf0c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.510099] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf02f2c0-dabf-4b8c-a807-328d18c8bd01 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.542159] env[63297]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-f285aaf4-eb2b-49e3-8234-1d79bbccb095 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.547604] env[63297]: DEBUG nova.virt.vmwareapi.driver [None req-ad0385ef-03ad-42b3-aa26-97d0fc73e1f3 None None] Extension org.openstack.compute already exists. {{(pid=63297) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 504.550390] env[63297]: INFO nova.compute.provider_config [None req-ad0385ef-03ad-42b3-aa26-97d0fc73e1f3 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 505.053780] env[63297]: DEBUG nova.context [None req-ad0385ef-03ad-42b3-aa26-97d0fc73e1f3 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),bbaeb2bb-fdab-4f3a-a710-76c0678b2f09(cell1) {{(pid=63297) load_cells /opt/stack/nova/nova/context.py:464}}
[ 505.055918] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 505.056171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 505.056831] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 505.057279] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Acquiring lock "bbaeb2bb-fdab-4f3a-a710-76c0678b2f09" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 505.057472] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Lock "bbaeb2bb-fdab-4f3a-a710-76c0678b2f09" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 505.058489] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Lock "bbaeb2bb-fdab-4f3a-a710-76c0678b2f09" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 505.079071] env[63297]: INFO dbcounter [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Registered counter for database nova_cell0
[ 505.087953] env[63297]: INFO dbcounter [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Registered counter for database nova_cell1
[ 505.091087] env[63297]: DEBUG oslo_db.sqlalchemy.engines [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63297) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 505.091507] env[63297]: DEBUG oslo_db.sqlalchemy.engines [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63297) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 505.096528] env[63297]: ERROR nova.db.main.api [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 505.096528] env[63297]: result = function(*args, **kwargs)
[ 505.096528] env[63297]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 505.096528] env[63297]: return func(*args, **kwargs)
[ 505.096528] env[63297]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 505.096528] env[63297]: result = fn(*args, **kwargs)
[ 505.096528] env[63297]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 505.096528] env[63297]: return f(*args, **kwargs)
[ 505.096528] env[63297]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 505.096528] env[63297]: return db.service_get_minimum_version(context, binaries)
[ 505.096528] env[63297]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 505.096528] env[63297]: _check_db_access()
[ 505.096528] env[63297]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 505.096528] env[63297]: stacktrace = ''.join(traceback.format_stack())
[ 505.096528] env[63297]:
[ 505.097342] env[63297]: ERROR nova.db.main.api [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 505.097342] env[63297]: result = function(*args, **kwargs)
[ 505.097342] env[63297]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 505.097342] env[63297]: return func(*args, **kwargs)
[ 505.097342] env[63297]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 505.097342] env[63297]: result = fn(*args, **kwargs)
[ 505.097342] env[63297]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 505.097342] env[63297]: return f(*args, **kwargs)
[ 505.097342] env[63297]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 505.097342] env[63297]: return db.service_get_minimum_version(context, binaries)
[ 505.097342] env[63297]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 505.097342] env[63297]: _check_db_access()
[ 505.097342] env[63297]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 505.097342] env[63297]: stacktrace = ''.join(traceback.format_stack())
[ 505.097342] env[63297]:
[ 505.097897] env[63297]: WARNING nova.objects.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Failed to get minimum service version for cell bbaeb2bb-fdab-4f3a-a710-76c0678b2f09
[ 505.097897] env[63297]: WARNING nova.objects.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 505.098289] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Acquiring lock "singleton_lock" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 505.098450] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Acquired lock "singleton_lock" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
505.098688] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Releasing lock "singleton_lock" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 505.099015] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Full set of CONF: {{(pid=63297) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 505.099165] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ******************************************************************************** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 505.099293] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Configuration options gathered from: {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 505.099425] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 505.099609] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 505.099735] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ================================================================================ {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 505.099939] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] allow_resize_to_same_host = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.100126] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] arq_binding_timeout = 300 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.100258] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] backdoor_port = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.100384] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] backdoor_socket = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.100546] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] block_device_allocate_retries = 60 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.100703] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] block_device_allocate_retries_interval = 3 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.100895] env[63297]: DEBUG 
oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cert = self.pem {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.101084] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.101256] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute_monitors = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.101425] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] config_dir = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.101593] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] config_drive_format = iso9660 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.101725] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.101900] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] config_source = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.102097] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] console_host = devstack {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.102267] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] control_exchange = nova {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.102425] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cpu_allocation_ratio = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.102584] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] daemon = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.102749] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] debug = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.102903] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] default_access_ip_network_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.103078] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] default_availability_zone = nova {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.103268] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] default_ephemeral_format = 
None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.103431] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] default_green_pool_size = 1000 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.103671] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.103834] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] default_schedule_zone = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.103991] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] disk_allocation_ratio = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.104164] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] enable_new_services = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.104341] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] enabled_apis = ['osapi_compute'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.104501] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] enabled_ssl_apis = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.104658] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] flat_injected = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.104812] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] force_config_drive = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.104971] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] force_raw_images = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.105149] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] graceful_shutdown_timeout = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.105308] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] heal_instance_info_cache_interval = 60 {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.105535] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] host = cpu-1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.105712] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.105874] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] initial_disk_allocation_ratio = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.106052] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] initial_ram_allocation_ratio = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.106271] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.106432] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] instance_build_timeout = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.106589] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] instance_delete_interval = 300 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.106754] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] instance_format = [instance: %(uuid)s] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.106915] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] instance_name_template = instance-%08x {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.107090] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] instance_usage_audit = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.107260] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] instance_usage_audit_period = month {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.107423] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.107587] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] instances_path = /opt/stack/data/nova/instances {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.107748] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] internal_service_availability_zone = internal {{(pid=63297) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.107903] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] key = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.108074] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] live_migration_retry_count = 30 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.108244] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] log_color = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.108406] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] log_config_append = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.108564] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.108722] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] log_dir = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.108873] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] log_file = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.109007] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] log_options = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.109172] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] log_rotate_interval = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.109336] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] log_rotate_interval_type = days {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.109498] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] log_rotation_type = none {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.109625] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.109746] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.109908] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.110086] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.110225] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.110375] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] long_rpc_timeout = 1800 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.110529] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] max_concurrent_builds = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.110684] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] max_concurrent_live_migrations = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.110870] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] max_concurrent_snapshots = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.111048] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] max_local_block_devices = 3 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.111212] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] max_logfile_count = 30 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.111365] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] max_logfile_size_mb = 200 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.111519] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] maximum_instance_delete_attempts = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.111681] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] metadata_listen = 0.0.0.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.111842] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] metadata_listen_port = 8775 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.112042] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] metadata_workers = 2 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.112207] env[63297]: DEBUG oslo_service.service 
[None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] migrate_max_retries = -1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.112371] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] mkisofs_cmd = genisoimage {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.112573] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] my_block_storage_ip = 10.180.1.21 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.112705] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] my_ip = 10.180.1.21 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.112863] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] network_allocate_retries = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.113049] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.113252] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] osapi_compute_listen = 0.0.0.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.113424] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] osapi_compute_listen_port = 8774 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.113590] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] osapi_compute_unique_server_name_scope = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.113753] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] osapi_compute_workers = 2 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.113914] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] password_length = 12 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.114089] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] periodic_enable = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.114251] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] periodic_fuzzy_delay = 60 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.114415] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] pointer_model = usbtablet {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.114579] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] preallocate_images = none {{(pid=63297) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.114737] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] publish_errors = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.114867] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] pybasedir = /opt/stack/nova {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.115031] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ram_allocation_ratio = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.115193] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] rate_limit_burst = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.115358] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] rate_limit_except_level = CRITICAL {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.115514] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] rate_limit_interval = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.115671] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] reboot_timeout = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.115826] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] reclaim_instance_interval = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.115992] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] record = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.116162] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] reimage_timeout_per_gb = 60 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.116325] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] report_interval = 120 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.116483] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] rescue_timeout = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.116638] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] reserved_host_cpus = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.116792] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] reserved_host_disk_mb = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.116948] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 
None None] reserved_host_memory_mb = 512 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.117112] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] reserved_huge_pages = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.117269] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] resize_confirm_window = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.117426] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] resize_fs_using_block_device = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.117579] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] resume_guests_state_on_host_boot = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.117742] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.117897] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] rpc_response_timeout = 60 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.118067] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] run_external_periodic_tasks = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.118235] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] running_deleted_instance_action = reap {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.118394] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] running_deleted_instance_poll_interval = 1800 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.118552] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] running_deleted_instance_timeout = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.118709] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] scheduler_instance_sync_interval = 120 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.118873] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] service_down_time = 720 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.119053] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] servicegroup_driver = db {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.119212] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] shell_completion = None {{(pid=63297) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.119369] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] shelved_offload_time = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.119525] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] shelved_poll_interval = 3600 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.119691] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] shutdown_timeout = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.119851] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] source_is_ipv6 = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.120015] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ssl_only = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.120271] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.120438] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] sync_power_state_interval = 600 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.120611] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] sync_power_state_pool_size = 1000 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.120782] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] syslog_log_facility = LOG_USER {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.120962] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] tempdir = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.121138] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] timeout_nbd = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.121305] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] transport_url = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.121465] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] update_resources_interval = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.121623] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] use_cow_images = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.121783] env[63297]: DEBUG oslo_service.service [None 
req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] use_eventlog = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.121967] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] use_journal = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.122152] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] use_json = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.122314] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] use_rootwrap_daemon = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.122471] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] use_stderr = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.122626] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] use_syslog = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.122781] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vcpu_pin_set = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.122950] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plugging_is_fatal = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.123148] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plugging_timeout = 300 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.123332] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] virt_mkfs = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.123499] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] volume_usage_poll_interval = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.123651] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] watch_log_file = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.123817] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] web = /usr/share/spice-html5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 505.124007] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.124182] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.124344] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.124511] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_concurrency.disable_process_locking = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.125067] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.125262] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.125433] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.125605] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.125773] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.125937] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.126138] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.auth_strategy = keystone {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.126313] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.compute_link_prefix = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.126483] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.126658] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.dhcp_domain = novalocal {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.126829] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.enable_instance_password = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.126996] 
env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.glance_link_prefix = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.127176] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.127348] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.127511] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.instance_list_per_project_cells = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.127688] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.list_records_by_skipping_down_cells = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.127842] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.local_metadata_per_cell = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.128019] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.max_limit = 1000 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.128192] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.metadata_cache_expiration = 15 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.128367] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.neutron_default_tenant_id = default {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.128539] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.response_validation = warn {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.128713] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.use_neutron_default_nets = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.128881] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.129058] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.129228] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.129401] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.129572] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.vendordata_dynamic_targets = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.129735] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.vendordata_jsonfile_path = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.129914] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.130122] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.backend = dogpile.cache.memcached {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.130293] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.backend_argument = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.130463] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.backend_expiration_time = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.130631] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.config_prefix = cache.oslo {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.130824] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.dead_timeout = 60.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.131011] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.debug_cache_backend = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.131189] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.enable_retry_client = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.131355] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.enable_socket_keepalive = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.131530] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.enabled = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.131696] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.enforce_fips_mode = False {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.131868] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.expiration_time = 600 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.132060] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.hashclient_retry_attempts = 2 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.132237] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.hashclient_retry_delay = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.132404] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.memcache_dead_retry = 300 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.132566] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.memcache_password = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.132730] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.132892] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.133070] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.memcache_pool_maxsize = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.133269] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.133450] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.memcache_sasl_enabled = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.133629] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.133801] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.memcache_socket_timeout = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.133964] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.memcache_username = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.134148] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.proxies = [] {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.134315] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.redis_db = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.134475] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.redis_password = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.134646] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.redis_sentinel_service_name = mymaster {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.134851] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.134992] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.redis_server = localhost:6379 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.135173] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.redis_socket_timeout = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.135334] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.redis_username = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.135497] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.retry_attempts = 2 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.135661] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.retry_delay = 0.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.135824] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.socket_keepalive_count = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.135987] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.socket_keepalive_idle = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.136172] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.socket_keepalive_interval = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.136333] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.tls_allowed_ciphers = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.136488] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.tls_cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.136643] 
env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.tls_certfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.136803] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.tls_enabled = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.136963] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cache.tls_keyfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.137147] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.auth_section = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.137320] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.auth_type = password {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.137483] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.137659] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.catalog_info = volumev3::publicURL {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.137821] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.certfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.137983] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.collect_timing = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.138159] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.cross_az_attach = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.138320] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.debug = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.138481] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.endpoint_template = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.138642] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.http_retries = 3 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.138811] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.insecure = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.138963] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.keyfile = None {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.139154] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.os_region_name = RegionOne {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.139317] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.split_loggers = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.139474] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cinder.timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.139648] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.139809] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute.cpu_dedicated_set = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.139968] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute.cpu_shared_set = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.140148] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute.image_type_exclude_list = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.140313] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.140480] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute.max_concurrent_disk_ops = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.140641] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute.max_disk_devices_to_attach = -1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.140831] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.140995] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.141176] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute.resource_provider_association_refresh = 300 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.141336] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.141496] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute.shutdown_retry_interval = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.141674] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.141857] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] conductor.workers = 2 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.142070] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] console.allowed_origins = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.142246] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] console.ssl_ciphers = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.142414] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] console.ssl_minimum_version = default {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.142585] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] consoleauth.enforce_session_timeout = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.142751] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] consoleauth.token_ttl = 600 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.142919] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.143094] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.certfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.143283] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.collect_timing = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.143444] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.connect_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.143621] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.connect_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.143758] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.endpoint_override = None 
{{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.143918] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.insecure = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.144089] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.keyfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.144251] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.max_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.144408] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.min_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.144565] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.region_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.144721] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.retriable_status_codes = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.144878] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.service_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.145114] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.service_type = accelerator {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.145220] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.split_loggers = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.145380] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.status_code_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.145538] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.status_code_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.145693] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.145873] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.146044] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] cyborg.version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
505.146227] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.backend = sqlalchemy {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.146397] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.connection = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.146563] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.connection_debug = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.146731] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.connection_parameters = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.146894] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.connection_recycle_time = 3600 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.147070] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.connection_trace = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.147236] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.db_inc_retry_interval = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.147399] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.db_max_retries = 20 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.147559] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.db_max_retry_interval = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.147721] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.db_retry_interval = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.147881] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.max_overflow = 50 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.148055] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.max_pool_size = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.148220] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.max_retries = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.148387] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.148546] env[63297]: DEBUG oslo_service.service [None 
req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.mysql_wsrep_sync_wait = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.148711] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.pool_timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.148885] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.retry_interval = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.149058] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.slave_connection = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.149226] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.sqlite_synchronous = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.149388] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] database.use_db_reconnect = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.149566] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.backend = sqlalchemy {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.149733] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.connection = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.149898] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.connection_debug = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.150079] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.connection_parameters = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.150243] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.connection_recycle_time = 3600 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.150407] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.connection_trace = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.150568] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.db_inc_retry_interval = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.150730] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.db_max_retries = 20 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.150924] env[63297]: DEBUG oslo_service.service [None 
req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.db_max_retry_interval = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.151111] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.db_retry_interval = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.151278] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.max_overflow = 50 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.151440] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.max_pool_size = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.151602] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.max_retries = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.151774] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.151951] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.152139] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.pool_timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.152305] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.retry_interval = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.152465] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.slave_connection = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.152628] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] api_database.sqlite_synchronous = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.152803] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] devices.enabled_mdev_types = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.152984] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.153198] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ephemeral_storage_encryption.default_format = luks {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.153376] env[63297]: DEBUG oslo_service.service [None 
req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ephemeral_storage_encryption.enabled = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.153572] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.153719] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.api_servers = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.153878] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.154058] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.certfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.154229] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.collect_timing = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.154387] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.connect_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.154546] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.connect_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.154708] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.debug = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.154873] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.default_trusted_certificate_ids = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.155044] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.enable_certificate_validation = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.155210] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.enable_rbd_download = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.155408] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.endpoint_override = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.155531] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.insecure = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.155689] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.keyfile = None 
{{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.155846] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.max_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.156008] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.min_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.156178] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.num_retries = 3 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.156348] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.rbd_ceph_conf = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.156509] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.rbd_connect_timeout = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.156674] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.rbd_pool = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.156839] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.rbd_user = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.156998] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.region_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.157169] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.retriable_status_codes = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.157326] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.service_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.157496] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.service_type = image {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.157665] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.split_loggers = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.157823] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.status_code_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.157982] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.status_code_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.158169] env[63297]: DEBUG 
oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.158356] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.158515] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.verify_glance_signatures = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.158675] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] glance.version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.158840] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] guestfs.debug = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.159025] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] mks.enabled = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.159374] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.159570] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] image_cache.manager_interval = 2400 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.159739] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] image_cache.precache_concurrency = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.159911] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] image_cache.remove_unused_base_images = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.160095] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.160268] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.160448] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] image_cache.subdirectory_name = _base {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.160619] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.api_max_retries = 60 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.160785] env[63297]: DEBUG 
oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.api_retry_interval = 2 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.160966] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.auth_section = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.161150] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.auth_type = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.161312] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.161467] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.certfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.161629] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.collect_timing = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.161796] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.conductor_group = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.161976] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.connect_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.162163] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.connect_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.162324] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.endpoint_override = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.162488] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.insecure = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.162648] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.keyfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.162808] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.max_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.162968] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.min_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.163173] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.peer_list = [] {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.163354] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.region_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.163515] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.retriable_status_codes = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.163679] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.serial_console_state_timeout = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.163842] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.service_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.164017] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.service_type = baremetal {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.164180] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.shard = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.164345] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.split_loggers = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.164506] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.status_code_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.164663] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.status_code_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.164821] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.165009] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.165178] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ironic.version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.165358] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.165531] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] key_manager.fixed_key = **** {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.165711] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.165871] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.barbican_api_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.166044] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.barbican_endpoint = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.166217] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.barbican_endpoint_type = public {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.166376] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.barbican_region_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.166532] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.166686] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.certfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.166845] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.collect_timing = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.167022] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.insecure = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.167175] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.keyfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.167335] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.number_of_retries = 60 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.167494] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.retry_delay = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.167653] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.send_service_user_token = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.167814] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.split_loggers = False {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.167971] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.168145] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.verify_ssl = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.168303] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican.verify_ssl_path = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.168468] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican_service_user.auth_section = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.168621] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican_service_user.auth_type = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.168777] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican_service_user.cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.168930] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican_service_user.certfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.169104] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican_service_user.collect_timing = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.169265] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican_service_user.insecure = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.169421] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican_service_user.keyfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.169582] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican_service_user.split_loggers = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.169737] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] barbican_service_user.timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.169900] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vault.approle_role_id = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.170071] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vault.approle_secret_id = **** {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.170242] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vault.kv_mountpoint = secret {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.170399] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vault.kv_path = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.170556] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vault.kv_version = 2 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.170710] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vault.namespace = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.170892] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vault.root_token_id = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.171075] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vault.ssl_ca_crt_file = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.171242] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vault.timeout = 60.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.171452] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vault.use_ssl = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.171632] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.171805] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.171967] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.certfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.172148] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.collect_timing = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.172310] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.connect_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.172469] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.connect_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.172626] env[63297]: DEBUG oslo_service.service [None 
req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.endpoint_override = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.172788] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.insecure = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.172947] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.keyfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.173115] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.max_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.173270] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.min_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.173433] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.region_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.173655] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.retriable_status_codes = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.173828] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.service_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.174011] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.service_type = identity {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.174186] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.split_loggers = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.174348] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.status_code_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.174507] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.status_code_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.174666] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.174848] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.175021] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] keystone.version = None 
{{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.175227] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.connection_uri = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.175390] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.cpu_mode = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.175557] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.cpu_model_extra_flags = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.175725] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.cpu_models = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.175897] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.cpu_power_governor_high = performance {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.176079] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.cpu_power_governor_low = powersave {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.176245] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.cpu_power_management = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.176411] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.176574] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.device_detach_attempts = 8 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.176735] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.device_detach_timeout = 20 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.176900] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.disk_cachemodes = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.177078] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.disk_prefix = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.177248] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.enabled_perf_events = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.177410] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.file_backed_memory = 0 {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.177574] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.gid_maps = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.177731] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.hw_disk_discard = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.177886] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.hw_machine_type = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.178067] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.images_rbd_ceph_conf = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.178242] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.178411] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.178576] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.images_rbd_glance_store_name = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.178742] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.images_rbd_pool = rbd {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.178906] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.images_type = default {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.179077] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.images_volume_group = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.179239] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.inject_key = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.179397] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.inject_partition = -2 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.179554] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.inject_password = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.179710] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.iscsi_iface = None {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.179868] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.iser_use_multipath = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.180041] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.live_migration_bandwidth = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.180208] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.180367] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.live_migration_downtime = 500 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.180528] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.180689] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.180876] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.live_migration_inbound_addr = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.181055] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.181221] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.live_migration_permit_post_copy = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.181385] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.live_migration_scheme = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.181558] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.live_migration_timeout_action = abort {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.181720] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.live_migration_tunnelled = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.181881] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.live_migration_uri = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.182053] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.182217] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.max_queues = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.182380] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.182606] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.182767] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.nfs_mount_options = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.183071] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.183277] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.183448] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.num_iser_scan_tries = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.183617] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.num_memory_encrypted_guests = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.183781] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.183942] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.num_pcie_ports = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.184125] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.num_volume_scan_tries = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.184291] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.pmem_namespaces = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.184450] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.quobyte_client_cfg = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.184747] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.184921] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.rbd_connect_timeout = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.185102] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.185269] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.185429] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.rbd_secret_uuid = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.185585] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.rbd_user = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.185745] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.185915] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.remote_filesystem_transport = ssh {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.186087] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.rescue_image_id = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.186248] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.rescue_kernel_id = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.186404] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.rescue_ramdisk_id = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.186569] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.186727] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.rx_queue_size = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.186893] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.smbfs_mount_options = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.187204] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.187384] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.snapshot_compression = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.187547] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.snapshot_image_format = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.187774] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.187941] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.sparse_logical_volumes = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.188126] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.swtpm_enabled = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.188296] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.swtpm_group = tss {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.188457] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.swtpm_user = tss {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.188622] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.sysinfo_serial = unique {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.188781] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.tb_cache_size = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.188939] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.tx_queue_size = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.189123] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.uid_maps = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.189289] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.use_virtio_for_bridges = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.189457] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.virt_type = kvm {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.189622] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.volume_clear = zero 
{{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.189783] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.volume_clear_size = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.189949] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.volume_use_multipath = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.190122] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.vzstorage_cache_path = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.190290] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.190455] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.vzstorage_mount_group = qemu {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.190621] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.vzstorage_mount_opts = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.190788] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.191114] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.191304] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.vzstorage_mount_user = stack {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.191473] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.191654] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.auth_section = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.191832] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.auth_type = password {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.191996] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.192169] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.certfile = None 
{{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.192332] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.collect_timing = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.192489] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.connect_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.192647] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.connect_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.192816] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.default_floating_pool = public {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.192975] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.endpoint_override = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.193195] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.extension_sync_interval = 600 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.193422] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.http_retries = 3 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.193598] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.insecure = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.193760] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.keyfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.193920] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.max_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.194137] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.194265] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.min_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.194430] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.ovs_bridge = br-int {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.194593] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.physnets = [] {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.194770] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.region_name = RegionOne {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.194931] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.retriable_status_codes = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.195119] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.service_metadata_proxy = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.195281] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.service_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.195509] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.service_type = network {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.195695] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.split_loggers = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.195860] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.status_code_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.196033] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.status_code_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.196200] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.196386] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.196550] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] neutron.version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.196726] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] notifications.bdms_in_notifications = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.196901] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] notifications.default_level = INFO {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.197135] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] notifications.notification_format = unversioned {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.197331] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] notifications.notify_on_state_change = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.197511] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.197690] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] pci.alias = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.197860] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] pci.device_spec = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.198046] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] pci.report_in_placement = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.198235] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.auth_section = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.198407] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.auth_type = password {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.198572] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.198731] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.198885] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.certfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.199059] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.collect_timing = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.199220] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.connect_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.199374] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.connect_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.199527] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.default_domain_id = None {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.199681] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.default_domain_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.199834] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.domain_id = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.199987] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.domain_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.200157] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.endpoint_override = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.200318] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.insecure = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.200476] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.keyfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.200631] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.max_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.200814] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.min_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.200988] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.password = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.201167] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.project_domain_id = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.201334] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.project_domain_name = Default {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.201498] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.project_id = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.201668] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.project_name = service {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.201836] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.region_name = RegionOne {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.202000] 
env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.retriable_status_codes = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.202172] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.service_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.202341] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.service_type = placement {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.202503] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.split_loggers = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.202661] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.status_code_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.202819] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.status_code_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.202979] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.system_scope = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.203173] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.203352] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.trust_id = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.203513] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.user_domain_id = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.203680] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.user_domain_name = Default {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.203837] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.user_id = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.204018] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.username = nova {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.204205] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.204366] env[63297]: DEBUG oslo_service.service [None 
req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] placement.version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.204542] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.cores = 20 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.204704] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.count_usage_from_placement = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.204868] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.205045] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.injected_file_content_bytes = 10240 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.205209] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.injected_file_path_length = 255 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.205371] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.injected_files = 5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.205572] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.instances = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.205774] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.key_pairs = 100 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.205942] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.metadata_items = 128 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.206120] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.ram = 51200 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.206283] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.recheck_quota = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.206445] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.server_group_members = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.206606] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] quota.server_groups = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.206783] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.206945] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.207115] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] scheduler.image_metadata_prefilter = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.207335] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.207512] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] scheduler.max_attempts = 3 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.207673] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] scheduler.max_placement_results = 1000 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.207835] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.207994] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] scheduler.query_placement_for_image_type_support = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.208188] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.208367] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] scheduler.workers = 2 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.208540] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.208711] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.208887] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.209070] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.209243] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.209410] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.209573] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.209761] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.209928] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.host_subset_size = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.210108] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.210269] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.210432] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.210594] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.isolated_hosts = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.210753] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.isolated_images = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.210967] env[63297]: DEBUG oslo_service.service [None 
req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.211187] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.211366] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.211532] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.pci_in_placement = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.211696] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.211859] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.212032] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.212199] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.212409] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.212529] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.212693] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.track_instance_changes = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.212871] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.213056] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] metrics.required = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.213249] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] metrics.weight_multiplier = 1.0 
{{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.213420] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.213585] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] metrics.weight_setting = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.213904] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.214096] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] serial_console.enabled = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.214278] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] serial_console.port_range = 10000:20000 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.214450] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.214616] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.214784] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] serial_console.serialproxy_port = 6083 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.214953] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] service_user.auth_section = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.215139] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] service_user.auth_type = password {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.215299] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] service_user.cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.215456] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] service_user.certfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.215647] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] service_user.collect_timing = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.215858] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] service_user.insecure = False {{(pid=63297) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.216041] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] service_user.keyfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.216223] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] service_user.send_service_user_token = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.216386] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] service_user.split_loggers = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.216544] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] service_user.timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.216729] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.agent_enabled = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.216957] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.enabled = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.217240] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.217493] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.217676] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.html5proxy_port = 6082 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.217842] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.image_compression = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.218012] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.jpeg_compression = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.218204] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.playback_compression = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.218371] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.require_secure = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.218542] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.server_listen = 127.0.0.1 {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.218709] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.218888] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.streaming_mode = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.219070] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] spice.zlib_compression = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.219242] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] upgrade_levels.baseapi = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.219412] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] upgrade_levels.compute = auto {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.219572] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] upgrade_levels.conductor = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.219728] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] upgrade_levels.scheduler = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.219893] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vendordata_dynamic_auth.auth_section = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.220068] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vendordata_dynamic_auth.auth_type = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.220228] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vendordata_dynamic_auth.cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.220383] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vendordata_dynamic_auth.certfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.220546] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.220701] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vendordata_dynamic_auth.insecure = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.220887] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vendordata_dynamic_auth.keyfile = None {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.221071] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.221232] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vendordata_dynamic_auth.timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.221405] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.api_retry_count = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.221564] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.ca_file = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.221734] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.cache_prefix = devstack-image-cache {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.221898] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.cluster_name = testcl1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.222075] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.connection_pool_size = 10 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.222239] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.console_delay_seconds = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.222407] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.datastore_regex = ^datastore.* {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.222616] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.222821] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.host_password = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.223032] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.host_port = 443 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.223250] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.host_username = administrator@vsphere.local {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.223432] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.insecure = True {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.223595] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.integration_bridge = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.223764] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.maximum_objects = 100 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.223918] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.pbm_default_policy = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.224095] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.pbm_enabled = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.224255] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.pbm_wsdl_location = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.224422] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.224581] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.serial_port_proxy_uri = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.224739] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.serial_port_service_uri = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.224902] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.task_poll_interval = 0.5 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.225125] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.use_linked_clone = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.225306] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.vnc_keymap = en-us {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.225470] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.vnc_port = 5900 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.225631] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vmware.vnc_port_total = 10000 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.225819] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vnc.auth_schemes = ['none'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.225997] 
env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vnc.enabled = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.226306] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.226503] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.226706] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vnc.novncproxy_port = 6080 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.226882] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vnc.server_listen = 127.0.0.1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.227076] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.227239] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vnc.vencrypt_ca_certs = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.227397] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vnc.vencrypt_client_cert = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.227555] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vnc.vencrypt_client_key = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.227726] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.227885] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.disable_deep_image_inspection = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.228057] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.228221] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.228378] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.228538] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.disable_rootwrap = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.228695] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.enable_numa_live_migration = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.228872] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.229057] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.229223] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.229385] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.libvirt_disable_apic = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.229546] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.229708] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.229877] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.230053] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.230221] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.230379] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.230536] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.230694] 
env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.230878] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.231067] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.231254] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.231422] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] wsgi.client_socket_timeout = 900 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.231583] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] wsgi.default_pool_size = 1000 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.231745] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] wsgi.keep_alive = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.231913] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] wsgi.max_header_line = 16384 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.232085] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] wsgi.secure_proxy_ssl_header = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.232247] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] wsgi.ssl_ca_file = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.232406] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] wsgi.ssl_cert_file = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.232592] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] wsgi.ssl_key_file = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.232759] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] wsgi.tcp_keepidle = 600 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.232939] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.233132] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] zvm.ca_file = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.233306] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] zvm.cloud_connector_url = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.233607] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.233775] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] zvm.reachable_timeout = 300 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.233955] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_policy.enforce_new_defaults = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.234345] env[63297]: WARNING oslo_config.cfg [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
[ 505.234529] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_policy.enforce_scope = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.234699] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_policy.policy_default_rule = default {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.234876] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.235083] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_policy.policy_file = policy.yaml {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.235290] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.235453] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.235613] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.235770] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.235928] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.236110] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_policy.remote_timeout = 60.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.236282] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.236453] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.236626] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler.connection_string = messaging:// {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.236795] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler.enabled = False {{(pid=63297) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.236960] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler.es_doc_type = notification {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.237133] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler.es_scroll_size = 10000 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.237302] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler.es_scroll_time = 2m {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.237461] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler.filter_error_trace = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.237625] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler.hmac_keys = **** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.237787] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler.sentinel_service_name = mymaster {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.237950] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler.socket_timeout = 0.1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.238127] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler.trace_requests = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.238285] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler.trace_sqlalchemy = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.238468] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler_jaeger.process_tags = {} {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.238627] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler_jaeger.service_name_prefix = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.238797] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] profiler_otlp.service_name_prefix = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.238982] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] remote_debug.host = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.239158] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] remote_debug.port = None {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.239342] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.239506] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.239668] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.239830] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.239992] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.240166] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.240326] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.240487] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.240646] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.240840] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.241013] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.241191] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.241357] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.241527] 
env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.241697] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.241900] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.242037] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.242216] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.242378] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.242538] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.242699] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.242863] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.243035] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.243226] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.243398] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.243559] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.243719] env[63297]: DEBUG oslo_service.service [None 
req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.243879] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.244058] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.244229] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.ssl = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.244401] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.244569] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.244728] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.244898] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.245080] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.ssl_version = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.245245] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.245432] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.245599] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_notifications.retry = -1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.245777] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.245950] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_messaging_notifications.transport_url = **** {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.246137] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.auth_section = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.246300] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.auth_type = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.246455] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.cafile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.246612] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.certfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.246772] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.collect_timing = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.246925] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.connect_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.247116] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.connect_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.247304] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.endpoint_id = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.247475] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.endpoint_interface = publicURL {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.247632] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.endpoint_override = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.247784] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.endpoint_region_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.247940] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.endpoint_service_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.248107] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.endpoint_service_type = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.248269] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.insecure = False {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.248422] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.keyfile = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.248573] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.max_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.248732] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.min_version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.248896] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.region_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.249065] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.retriable_status_codes = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.249228] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.service_name = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.249382] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.service_type = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.249541] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.split_loggers = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.249695] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.status_code_retries = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.249852] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.status_code_retry_delay = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.250014] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.timeout = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.250175] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.valid_interfaces = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.250328] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_limit.version = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.250493] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_reports.file_event_handler = None {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.250654] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.250826] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] oslo_reports.log_dir = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.251022] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.251194] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.251351] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.251513] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.251673] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.251833] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.252009] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.252176] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plug_ovs_privileged.group = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.252329] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.252490] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.252651] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.252805] env[63297]: DEBUG 
oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] vif_plug_ovs_privileged.user = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.252975] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_linux_bridge.flat_interface = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.253188] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.253374] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.253546] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.253716] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.253882] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.254061] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.254227] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.254402] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.254572] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_ovs.isolate_vif = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.254740] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.254908] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.255092] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=63297) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.255266] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_ovs.ovsdb_interface = native {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.255428] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] os_vif_ovs.per_port_bridge = False {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.255598] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] privsep_osbrick.capabilities = [21] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.255756] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] privsep_osbrick.group = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.255912] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] privsep_osbrick.helper_command = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.256109] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.256275] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.256433] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] privsep_osbrick.user = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.256606] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.256761] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] nova_sys_admin.group = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.256915] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] nova_sys_admin.helper_command = None {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.257089] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.257252] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.257404] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] nova_sys_admin.user = None {{(pid=63297) log_opt_values 
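The *_privileged and nova_sys_admin groups above each describe an oslo.privsep privilege-separation context, and the numeric capabilities lists are Linux capability numbers: [12] is NET_ADMIN, [12, 1] is NET_ADMIN plus DAC_OVERRIDE, and [0, 1, 2, 3, 12, 21] is CHOWN, DAC_OVERRIDE, DAC_READ_SEARCH, FOWNER, NET_ADMIN and SYS_ADMIN. A minimal sketch of how such a context can be declared with oslo.privsep (the prefix, module path and variable name are illustrative, not Nova's exact definitions):

from oslo_privsep import capabilities as caps
from oslo_privsep import priv_context

# Capability numbers matching the [nova_sys_admin] line above:
# 0=CHOWN, 1=DAC_OVERRIDE, 2=DAC_READ_SEARCH, 3=FOWNER, 12=NET_ADMIN, 21=SYS_ADMIN.
sys_admin_pctxt = priv_context.PrivContext(
    __name__,                         # entrypoint prefix (illustrative)
    cfg_section='nova_sys_admin',     # same config section as the options logged above
    pypath=__name__ + '.sys_admin_pctxt',
    capabilities=[caps.CAP_CHOWN, caps.CAP_DAC_OVERRIDE,
                  caps.CAP_DAC_READ_SEARCH, caps.CAP_FOWNER,
                  caps.CAP_NET_ADMIN, caps.CAP_SYS_ADMIN],
)

# Privileged helpers are then exposed with @sys_admin_pctxt.entrypoint and run in
# a separate privsep daemon that holds only the capabilities listed above.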
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 505.257533] env[63297]: DEBUG oslo_service.service [None req-dfd5892c-bbe0-46f7-bc00-d4e011cf49b0 None None] ******************************************************************************** {{(pid=63297) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 505.258013] env[63297]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 505.761731] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Getting list of instances from cluster (obj){ [ 505.761731] env[63297]: value = "domain-c8" [ 505.761731] env[63297]: _type = "ClusterComputeResource" [ 505.761731] env[63297]: } {{(pid=63297) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 505.762896] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c1741f-127f-4d28-b9b1-f78d3fd5b976 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.772227] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Got total of 0 instances {{(pid=63297) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 505.772754] env[63297]: WARNING nova.virt.vmwareapi.driver [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 505.773237] env[63297]: INFO nova.virt.node [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Generated node identity 88960333-a089-4255-ad72-5c02d57b2b35 [ 505.773460] env[63297]: INFO nova.virt.node [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Wrote node identity 88960333-a089-4255-ad72-5c02d57b2b35 to /opt/stack/data/n-cpu-1/compute_id [ 506.276468] env[63297]: WARNING nova.compute.manager [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Compute nodes ['88960333-a089-4255-ad72-5c02d57b2b35'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 507.282137] env[63297]: INFO nova.compute.manager [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 508.287184] env[63297]: WARNING nova.compute.manager [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
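The two nova.virt.node lines above record the first-boot path: no persisted node identity exists yet, so a UUID is generated and written to the compute_id file so later restarts of this host reuse the same identity; the ComputeHostNotFound warnings that follow are the expected consequence of the database not yet knowing this node. A hedged sketch of that read-or-generate-and-persist pattern (the helper is illustrative, only the file name and directory come from the log):

import os
import uuid

def get_or_create_node_identity(state_dir):
    ident_path = os.path.join(state_dir, 'compute_id')
    if os.path.exists(ident_path):
        # Reuse the identity written on an earlier start of this host.
        with open(ident_path) as f:
            return uuid.UUID(f.read().strip())
    node_uuid = uuid.uuid4()
    with open(ident_path, 'w') as f:
        f.write(str(node_uuid))   # persisted so the identity is stable across restarts
    return node_uuid

# e.g. get_or_create_node_identity('/opt/stack/data/n-cpu-1')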
[ 508.287522] env[63297]: DEBUG oslo_concurrency.lockutils [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 508.287687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 508.287836] env[63297]: DEBUG oslo_concurrency.lockutils [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 508.287988] env[63297]: DEBUG nova.compute.resource_tracker [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 508.289267] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1b5cc4-7149-4d29-82e6-2710c5857872 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.297421] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55556b3-b052-4188-86b8-286c0dcae039 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.311063] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ff6ce2-f8f3-4c6e-9a46-b08988a4f5f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.317332] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67258bee-46fc-427f-a887-a465cbcd5a30 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.346473] env[63297]: DEBUG nova.compute.resource_tracker [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181388MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 508.346615] env[63297]: DEBUG oslo_concurrency.lockutils [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 508.346805] env[63297]: DEBUG oslo_concurrency.lockutils [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 508.849727] env[63297]: WARNING 
nova.compute.resource_tracker [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] No compute node record for cpu-1:88960333-a089-4255-ad72-5c02d57b2b35: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 88960333-a089-4255-ad72-5c02d57b2b35 could not be found. [ 509.353685] env[63297]: INFO nova.compute.resource_tracker [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 88960333-a089-4255-ad72-5c02d57b2b35 [ 510.861707] env[63297]: DEBUG nova.compute.resource_tracker [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 510.862100] env[63297]: DEBUG nova.compute.resource_tracker [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 511.030278] env[63297]: INFO nova.scheduler.client.report [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] [req-e8a3d741-3913-4a5b-a783-df354a1f51e7] Created resource provider record via placement API for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 511.062935] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114f01ee-2a85-4ab7-959e-eeac2bf74aa0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.070979] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417e72fc-37e3-4301-ba82-552bb2c7c28f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.101475] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80a7832-2c01-4f8f-8c05-b621f3625687 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.108860] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdfae21-d2e7-4f80-9b3b-0018675bd24d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.122235] env[63297]: DEBUG nova.compute.provider_tree [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 511.658015] env[63297]: DEBUG nova.scheduler.client.report [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 511.658255] env[63297]: DEBUG nova.compute.provider_tree [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 0 to 1 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 511.658399] env[63297]: DEBUG nova.compute.provider_tree [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 511.705551] env[63297]: DEBUG nova.compute.provider_tree [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 1 to 2 during operation: update_traits {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 512.210764] env[63297]: DEBUG nova.compute.resource_tracker [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 512.211166] env[63297]: DEBUG oslo_concurrency.lockutils [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.864s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 512.211166] env[63297]: DEBUG nova.service [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Creating RPC server for service compute {{(pid=63297) start /opt/stack/nova/nova/service.py:186}} [ 512.225059] env[63297]: DEBUG nova.service [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] Join ServiceGroup membership for this service compute {{(pid=63297) start /opt/stack/nova/nova/service.py:203}} [ 512.225283] env[63297]: DEBUG nova.servicegroup.drivers.db [None req-40cfba39-39b0-4b0b-817e-8e9bbe98dad9 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=63297) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 528.230730] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_power_states {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.734064] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Getting list of instances from cluster (obj){ [ 528.734064] env[63297]: value = 
"domain-c8" [ 528.734064] env[63297]: _type = "ClusterComputeResource" [ 528.734064] env[63297]: } {{(pid=63297) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 528.734428] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a605f4-3e34-4705-9927-3042d8247849 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.743779] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Got total of 0 instances {{(pid=63297) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 528.744010] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.744311] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Getting list of instances from cluster (obj){ [ 528.744311] env[63297]: value = "domain-c8" [ 528.744311] env[63297]: _type = "ClusterComputeResource" [ 528.744311] env[63297]: } {{(pid=63297) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 528.745136] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9a6b74-1f7f-4f89-a25f-c0d1ac1a3dfc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.752665] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Got total of 0 instances {{(pid=63297) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 560.674320] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 560.674763] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 560.674863] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 560.674955] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 561.177553] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. 
{{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 561.177814] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 561.178025] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 561.178242] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 561.178433] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 561.178616] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 561.178797] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 561.178957] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
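The run of "Running periodic task ComputeManager._*" lines above comes from oslo.service's periodic-task machinery: each decorated method is invoked on its own schedule, and _reclaim_queued_deletes returns immediately because reclaim_instance_interval is not set (<= 0). An illustrative sketch of that pattern, not Nova's manager code; the class, option default and spacing value are made up:

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

class ExampleManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _heal_instance_info_cache(self, context):
        # rebuild the list of instances whose network info cache needs healing
        pass

    @periodic_task.periodic_task
    def _reclaim_queued_deletes(self, context):
        if CONF.reclaim_instance_interval <= 0:
            return  # mirrors the "<= 0, skipping..." message in the log

manager = ExampleManager()
manager.run_periodic_tasks(context=None)   # a service normally drives this from a timer loop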
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 561.179120] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 561.682707] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.683171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.683171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.683319] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 561.684169] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6a71ad-a854-42e3-99c4-6de6d5818313 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.692556] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816dfa4c-abb9-4550-bca0-5fe63e36511a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.705719] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7efa0d76-318b-475a-887c-9fa1b6e5be91 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.712050] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435ae597-2305-4382-ba1e-181e0277f189 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.740674] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181380MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 561.740819] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.740984] 
env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.759300] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 562.759602] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 562.773647] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4bc9c8b-3e19-4710-9608-3ca4996ab384 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.781009] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc2db6c-2b78-4838-b7b0-002fc5db0cf2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.809432] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade3c0b0-a738-42be-ad9f-6b1a1fa8c9a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.816065] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ab380d-4feb-4934-9bd7-051a700dccfb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.828478] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.332350] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 563.837348] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 563.837752] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.097s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.823759] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 623.824196] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.329477] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.329675] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 624.329800] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 624.835653] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 624.836051] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.836051] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.836216] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.836351] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.836490] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.836631] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.836757] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 624.836894] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.340338] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.340581] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.340688] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.340859] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 625.341882] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed76d2b-633c-41a4-a462-df71515feb6a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.350379] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8523d7-3bf5-4b5d-b1f2-ee9fa1ba0c74 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.364075] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f31eb60-6ff0-43bc-b184-fd3ee8f838a8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.370644] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9171b5e-08b8-4f88-bf32-25afa6c00e7b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.398034] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181377MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 625.398172] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.398357] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.416552] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 626.416920] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 626.429441] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90330b64-9a56-499d-854b-27cf458ef8d1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.437283] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42cb1da0-6754-44b0-b65d-801f37f8457f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.465672] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb026120-a10a-46f9-a713-2239dbb1d336 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.472721] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34263732-df91-4519-b21c-81a287f1365d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.485914] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.989511] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 626.990734] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource 
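The inventory payload that keeps reappearing in the "based on inventory data: {...}" lines above is what the resource tracker reports to Placement for this provider: 48 physical vCPUs stretched by an allocation_ratio of 4.0 with a max_unit of 16, 196590 MB of RAM with 512 MB held back as reserved, and 400 GB of datastore capacity capped at a 182 GB max_unit. A small illustrative helper that assembles the same structure from those numbers (the function itself is not Nova code, only the values mirror the log):

def build_inventory(vcpus, ram_mb, disk_gb, max_vcpu, max_mb, max_gb,
                    cpu_ratio=4.0, ram_ratio=1.0, disk_ratio=1.0,
                    reserved_ram_mb=512):
    # One entry per resource class, in the shape Placement expects.
    return {
        'VCPU': {'total': vcpus, 'reserved': 0, 'min_unit': 1,
                 'max_unit': max_vcpu, 'step_size': 1,
                 'allocation_ratio': cpu_ratio},
        'MEMORY_MB': {'total': ram_mb, 'reserved': reserved_ram_mb, 'min_unit': 1,
                      'max_unit': max_mb, 'step_size': 1,
                      'allocation_ratio': ram_ratio},
        'DISK_GB': {'total': disk_gb, 'reserved': 0, 'min_unit': 1,
                    'max_unit': max_gb, 'step_size': 1,
                    'allocation_ratio': disk_ratio},
    }

inventory = build_inventory(48, 196590, 400, 16, 65530, 182)
# Effective schedulable capacity is (total - reserved) * allocation_ratio,
# e.g. (48 - 0) * 4.0 = 192 VCPUs for this provider.

When a later audit produces an identical dictionary, the report client skips the update, which is exactly what the "Inventory has not changed" lines record.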
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 626.990919] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.992930] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.993402] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.993402] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 686.993479] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 687.497022] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 687.497268] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.497410] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.497553] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.497700] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.497875] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.498065] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.498195] env[63297]: DEBUG 
nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 687.498332] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 688.001742] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.002206] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.002206] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.002342] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 688.003189] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ded0a7-5e4c-4bba-95a5-f5e77054db34 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.011400] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb88292-b5f5-496a-9fd2-66712d9001c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.025132] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501e53e4-a4ed-4734-a273-7af4e6749b93 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.031115] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b308460c-b9e7-49db-b0f4-9e755c61b09d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.058320] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181377MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 688.058471] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.058649] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.076091] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 689.076324] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 689.088634] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2631a629-a573-41ec-86cb-08cbd475950b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.095993] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e3acbe-0759-496f-b143-004490b1b002 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.125505] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0116cd72-86d2-443b-9354-c41e2a68e934 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.132068] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737a25a0-5ab4-4d3d-9f55-0e74c9c5a7b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.144431] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.647953] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 689.649215] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 
689.649396] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.318422] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 744.318786] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 744.823813] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 744.824026] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 744.824110] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 745.327161] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 745.327576] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 745.327576] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 745.327727] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 745.327820] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 745.327944] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 745.328098] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 745.831175] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.831446] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.831608] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.831775] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 745.832711] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299e3979-4630-42ef-9caa-822034d89b84 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.842084] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce26d99-7a69-4e3c-a359-92c3389d6d54 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.855929] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2b4a53-6e2e-4220-8b16-e0aed27b48b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.862332] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ac786d-95ac-43f1-a10d-16332b4112a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.890208] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181427MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 745.890374] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.890524] 
env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.908493] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 746.908734] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 746.921843] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072fe004-9987-49ff-8144-a7dfc4755747 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.929479] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1778ff1e-788b-4efb-ac75-afa1be497f5b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.959752] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99bc9b9-23c2-405f-a1dd-c6970480b9d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.966813] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81260dd9-e9e4-4b53-a815-bb55483c1ae0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.979417] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.482748] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.484206] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 747.484459] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.822179] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 747.822345] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.665374] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.665819] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Cleaning up deleted instances {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 801.169308] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] There are 0 instances to clean {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 801.169546] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.169689] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Cleaning up deleted instances with incomplete migration {{(pid=63297) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 801.672528] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.175023] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.175396] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.665508] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.665718] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 803.665915] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.169475] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.169747] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.169906] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.170128] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 804.170998] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9abd5a-df45-402e-82bc-8e0c7cc0e07b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.179174] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a39fe9-f500-41af-a043-3a4c8aa7a034 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.193363] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93e199b-8bf7-4d5c-acb2-b57a998146d9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.199410] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1a4096-e304-4ef4-b8b5-a67976fb0b11 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.227342] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181424MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 804.227480] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.227658] 
env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.245720] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 805.245957] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 805.258422] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73ad787-88c4-4f81-b3a5-c48b13191fdd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.266050] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8a8b0f-d78b-434f-9dbf-703d7d6194c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.295262] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e414fa2b-41f6-4bbc-9724-4cb4453904f5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.302191] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab98e8a7-807f-409b-938d-5df9cf2d7bf6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.314708] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.818089] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 805.819330] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 805.819507] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.820749] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.820749] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.820749] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 806.821282] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 807.324325] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 807.324558] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.324682] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.324830] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.665291] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.169506] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.169675] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.666864] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.666864] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.666864] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.666864] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 865.666864] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.170175] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.170175] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.170175] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.170175] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 866.171611] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1465bbd1-3171-44b5-b5a5-a189dca09c15 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.180886] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efedd9ca-7fb1-4e16-bb1b-a17971f549d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.195876] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a95ac07-3a86-45c5-91cb-736e0a4ad609 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.202601] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-10bf1f76-b2da-4045-bf11-4be165f833cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.230892] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181428MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 866.231033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.231220] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.263875] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 867.264128] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 867.282644] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 867.297262] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 867.297440] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 867.309186] env[63297]: DEBUG 
nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 867.324262] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 867.335074] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb43006-fe89-4d1a-b3a4-fa74f93f2632 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.342436] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a5ed33-4f5a-4711-ab6c-dacdcd2dbcd9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.372155] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0199bd9-94d2-45a2-8f11-c5a154c221d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.378913] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69dc0292-9043-487a-931e-d3edf6aed3f9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.391334] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.894234] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 867.895460] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 867.895644] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.664s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.895361] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] 
Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.895713] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.895713] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 868.895842] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 869.399644] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 869.399874] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.665239] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 923.665627] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.665484] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.665893] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
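
Aside: each audit cycle above is bracketed by 'Acquiring lock "compute_resources"' and 'Lock "compute_resources" "released"' records, which are emitted by oslo.concurrency's lockutils. A minimal sketch of the same pattern follows; the function body and the second lock name are purely illustrative.

    from oslo_concurrency import lockutils

    # Decorator form: every call serializes on the named in-process lock and
    # produces "acquired"/"released" debug lines like the ones in this log.
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        pass  # the audit and placement update would happen here

    update_available_resource()

    # Context-manager form of the same idea, for ad-hoc critical sections.
    with lockutils.lock('resource_tracker_demo'):
        pass
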
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 927.664965] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.666572] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.666572] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.666572] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.168576] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.168818] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.168992] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.169194] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 928.170153] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba686b8d-8197-4757-aa36-3c3cb834633f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.178179] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdb9d6a-0b48-47e5-bdde-f3f44e572f74 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.192222] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023f4977-2f82-4c49-830a-0e23023a55aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.198131] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c459cf-10e0-49f3-b0b0-74d6bb61cf46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.226132] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181431MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 928.226284] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.226472] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.244142] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 929.244417] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 929.257100] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fb881d-5364-46ed-9cd1-5dafec4131bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.264712] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d209c437-c12d-4fae-9c9f-8fbf12af61ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.295218] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d46d6a-bd5d-4e45-bfd1-8018d96e48f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.302749] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508009ce-71d0-4bef-97c2-c2436d2cc064 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.315720] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.818859] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 929.820122] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 929.820300] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.815859] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 930.816357] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 930.816357] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 930.816357] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 931.319014] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. 
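
Aside: the inventory dictionary that keeps appearing unchanged above is what bounds how much placement will schedule onto this node. A small worked example using the figures from the log; the capacity arithmetic (total minus reserved, times allocation_ratio, with max_unit capping any single allocation) mirrors how placement treats an inventory record, and the script itself is only illustrative.

    # Values copied from the inventory records in this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 182},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity {capacity:g}, "
              f"largest single allocation {inv['max_unit']}")

    # VCPU: (48 - 0) * 4.0 = 192 schedulable vCPUs, at most 16 per instance.
    # MEMORY_MB: (196590 - 512) * 1.0 = 196078 MB schedulable.
    # DISK_GB: 400 GB schedulable, but max_unit 182 caps any one allocation.
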
{{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 984.665901] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.665678] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.661387] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 988.165875] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 988.166149] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 988.166253] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 988.166428] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 988.669532] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.669894] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.669933] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.670084] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 988.670996] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19df1263-6600-42e8-ace9-e0ce0b8d73bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.679225] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4aa06d8-88ef-4d7e-bcdc-7e1853317d9b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.692622] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6872547-202e-4877-b044-ba0a48c4d176 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.698475] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad912dc-2ff3-465b-9536-34bceb3aa079 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.726860] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181419MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 988.727062] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.727197] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.744916] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 989.745165] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 989.757813] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074a50b3-156b-479d-8c9f-8f81e0683375 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.765016] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb9209a-42d5-46a4-9fc1-6a859e10f182 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.793163] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cd28a4-b3ee-4898-8ec8-c451cd6652aa {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.799561] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2a8a93-1675-4a13-b35f-46077fdf1a9f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.812513] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.315821] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 990.317081] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 990.317259] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.816136] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.816502] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.816502] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 990.816573] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 991.319384] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. 
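
Aside: the bursts of "Invoking PropertyCollector.RetrievePropertiesEx" records above are the vCenter round-trips made while auditing resources. A minimal sketch of issuing such a call through oslo.vmware follows; the vCenter address, credentials, object type, and property name are all hypothetical, and the snippet only runs against a reachable vCenter.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Hypothetical endpoint and credentials; real values come from nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',
        api_retry_count=3, task_poll_interval=0.5)

    # One PropertyCollector.RetrievePropertiesEx round-trip: fetch a property
    # from up to 100 HostSystem objects (property name chosen for illustration).
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'HostSystem', 100, ['summary.hardware.memorySize'])
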
{{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 991.319567] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 991.319725] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.667199] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.665067] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1048.666164] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1048.666164] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1049.665914] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1049.666188] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.170052] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.170381] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.170530] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.170683] env[63297]: DEBUG nova.compute.resource_tracker [None 
req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1050.171620] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba8ca20-71e8-4b2d-82a8-26df9f5b613d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.180079] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0097236f-3ccb-43aa-8e9d-2242ea83e365 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.193570] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6b5e32-5e8c-4e00-afe7-62be558c27e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.199502] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879d1d31-ba85-4e3d-a18b-1f0166d37cb8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.227905] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181421MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1050.228093] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.228321] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.246523] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1051.246763] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1051.260340] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8cc6fd-21f3-4f4f-ab46-07a2009bc0b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.267969] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d53ee35-4692-4bca-8444-09f85fd67a4d {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.296181] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c979144-f72c-429c-874f-d3c6cf2217ff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.302753] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1db27a4-243c-4bc8-adc7-211cba24ff62 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.315238] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.818388] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1051.819633] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1051.819816] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.819395] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.819644] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.819768] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1052.819884] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1053.323749] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. 
{{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1053.323990] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.324165] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.665405] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.168373] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.665660] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.665859] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Cleaning up deleted instances {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1106.169376] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] There are 0 instances to clean {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1106.667031] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.667031] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Cleaning up deleted instances with incomplete migration {{(pid=63297) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1110.168559] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.168954] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.168954] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1110.665210] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.665446] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.172572] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.173130] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.173130] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.173898] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1111.174473] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3ac080-5829-4b95-8d8b-3a04fe5b653d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.182908] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41c5425-44a4-4b72-b850-27e0726167b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.196706] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd57a16-2381-4487-81e0-7fe2798d98fd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.202900] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4b1a6d-9233-4f26-99dd-a1045577f4de {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.231351] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181427MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1111.231514] env[63297]: DEBUG 
oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.231664] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.250179] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1112.250433] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1112.263948] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a3e644-57d6-4c5b-8502-bc44171ad45b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.271656] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76e33ac-460f-4095-af1b-65ac4d6e8b47 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.306407] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6945c434-7d9c-435d-84a9-8b497966bce8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.314111] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6352cb2c-6ae9-4eb8-a3a1-4673c0e5d6e2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.327138] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.830321] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1112.831653] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1112.831839] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.600s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.827533] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.333617] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.333821] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1114.333939] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1114.836674] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1114.837099] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.837099] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.670528] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.231427] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_power_states {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.734527] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Getting list of instances from cluster (obj){ [ 1128.734527] env[63297]: value = "domain-c8" [ 1128.734527] env[63297]: _type = "ClusterComputeResource" [ 1128.734527] env[63297]: } {{(pid=63297) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1128.735575] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7742adb3-4e4a-44f7-91d9-29e859a6b728 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.744260] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Got total of 0 instances {{(pid=63297) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1161.675636] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Acquiring lock "e5f198e8-2080-4e3e-8ad5-964b855d70ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.675892] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Lock "e5f198e8-2080-4e3e-8ad5-964b855d70ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.179733] env[63297]: DEBUG nova.compute.manager [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1162.714066] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.714326] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.716517] env[63297]: INFO nova.compute.claims [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1163.762648] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0ee292-d465-495a-bbf0-892775a64413 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.774077] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b750c92-ea04-4bf2-9c00-ec71c4ad01a0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.811251] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b56dfb-a0c4-4082-9491-4cd3c09ad4f8 
{{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.819320] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6f8646-b192-433f-aee5-e12440487e00 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.841106] env[63297]: DEBUG nova.compute.provider_tree [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1164.345440] env[63297]: DEBUG nova.scheduler.client.report [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1164.842737] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Acquiring lock "7f8849fb-c5d6-47a1-8079-08dfb2e0b85a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.844080] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Lock "7f8849fb-c5d6-47a1-8079-08dfb2e0b85a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.850823] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.136s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.852524] env[63297]: DEBUG nova.compute.manager [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1165.342406] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquiring lock "cce038d4-dc9a-4fae-8348-1c2f674b79e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.342762] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lock "cce038d4-dc9a-4fae-8348-1c2f674b79e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.346293] env[63297]: DEBUG nova.compute.manager [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1165.360149] env[63297]: DEBUG nova.compute.utils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1165.360282] env[63297]: DEBUG nova.compute.manager [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1165.360523] env[63297]: DEBUG nova.network.neutron [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1165.801418] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "ef3346b1-ce09-4616-bdf4-200ea31efd01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.801703] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "ef3346b1-ce09-4616-bdf4-200ea31efd01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.845610] env[63297]: DEBUG nova.compute.manager [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1165.868024] env[63297]: DEBUG nova.compute.manager [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1165.888140] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.888436] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.890542] env[63297]: INFO nova.compute.claims [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1166.306136] env[63297]: DEBUG nova.compute.manager [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1166.375775] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.881429] env[63297]: DEBUG nova.compute.manager [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1167.178693] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.383898] env[63297]: DEBUG nova.policy [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '476e8c51dcf14460bfe64164e18ae6a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '79e790f0304047138bab45773deee79d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1167.647525] env[63297]: DEBUG nova.virt.hardware [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1167.647842] env[63297]: DEBUG nova.virt.hardware [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1167.647932] env[63297]: DEBUG nova.virt.hardware [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1167.648101] env[63297]: DEBUG nova.virt.hardware [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1167.648248] env[63297]: DEBUG nova.virt.hardware [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1167.648394] env[63297]: DEBUG nova.virt.hardware [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 
tempest-TenantUsagesTestJSON-2043461262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1167.648602] env[63297]: DEBUG nova.virt.hardware [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1167.648756] env[63297]: DEBUG nova.virt.hardware [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1167.648918] env[63297]: DEBUG nova.virt.hardware [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1167.649104] env[63297]: DEBUG nova.virt.hardware [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1167.649315] env[63297]: DEBUG nova.virt.hardware [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1167.650285] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8651a1d1-5b97-484b-9992-35fe13d185c7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.662260] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01985fd4-88bd-4989-8837-42f7fd572fa0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.682737] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431b655a-f290-452f-9ef3-56fd5271db75 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.772608] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0df1035-4977-4a3d-8377-7122447d40e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.781049] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6913308-a021-4f27-9a17-a30bb4aa8c35 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.814771] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3944675-422d-4d5a-a734-6864457fac6e {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.824205] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01258a5-559f-49d1-a565-74eb5da0ba38 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.841171] env[63297]: DEBUG nova.compute.provider_tree [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1168.252019] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.346142] env[63297]: DEBUG nova.scheduler.client.report [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1168.589047] env[63297]: DEBUG nova.network.neutron [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Successfully created port: 76456429-f255-412c-910b-a21b9ee6408a {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1168.858102] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.967s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.858102] env[63297]: DEBUG nova.compute.manager [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1168.859053] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.483s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.860436] env[63297]: INFO nova.compute.claims [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1169.367138] env[63297]: DEBUG nova.compute.utils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1169.368599] env[63297]: DEBUG nova.compute.manager [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Not allocating networking since 'none' was specified. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1169.551376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "3d66ef2c-ac35-4eae-a205-6dd80ee564d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.551376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "3d66ef2c-ac35-4eae-a205-6dd80ee564d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.874869] env[63297]: DEBUG nova.compute.manager [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1169.906214] env[63297]: DEBUG nova.scheduler.client.report [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1169.937046] env[63297]: DEBUG nova.scheduler.client.report [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1169.937294] env[63297]: DEBUG nova.compute.provider_tree [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1169.960950] env[63297]: DEBUG nova.scheduler.client.report [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1169.994598] env[63297]: DEBUG nova.scheduler.client.report [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1170.056988] env[63297]: DEBUG nova.compute.manager [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1170.103118] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2310ced4-ca24-4dc7-9e0a-0cc63bca127e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.113865] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a43b6b-33b5-4c00-aa79-75e085334c03 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.149473] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7524d56-b6a1-4fc3-8261-ab52d3312344 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.158800] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcbf4b04-edbe-4d17-842c-bb0e090e44d9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.178285] env[63297]: DEBUG nova.compute.provider_tree [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1170.586813] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.665535] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.665842] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.681107] env[63297]: DEBUG nova.scheduler.client.report [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1170.886789] env[63297]: DEBUG nova.compute.manager [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 
7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1170.924460] env[63297]: DEBUG nova.virt.hardware [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1170.925028] env[63297]: DEBUG nova.virt.hardware [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1170.925960] env[63297]: DEBUG nova.virt.hardware [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1170.926338] env[63297]: DEBUG nova.virt.hardware [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1170.926669] env[63297]: DEBUG nova.virt.hardware [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1170.926991] env[63297]: DEBUG nova.virt.hardware [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1170.927404] env[63297]: DEBUG nova.virt.hardware [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1170.934355] env[63297]: DEBUG nova.virt.hardware [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1170.934355] env[63297]: DEBUG nova.virt.hardware [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1170.934355] env[63297]: DEBUG nova.virt.hardware [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1170.934355] env[63297]: DEBUG nova.virt.hardware [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1170.934355] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7346e8b7-be00-476d-8f00-38da54f656c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.945151] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b55ff6a2-35a5-4122-b27d-b777c476c15c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.964680] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Instance VIF info [] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1170.976591] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1170.977482] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38df4b62-2ed2-4485-8417-7c0911c1abcf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.983497] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "81920a24-f406-4923-98b7-cc0f3d0ccc8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.983497] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "81920a24-f406-4923-98b7-cc0f3d0ccc8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.999102] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Created folder: OpenStack in parent group-v4. [ 1170.999323] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Creating folder: Project (0eea8110fbb64e0486fc0d094800f129). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1170.999570] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef1ae3ef-d407-4d54-9c58-ef2f85c6b74f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.009850] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Created folder: Project (0eea8110fbb64e0486fc0d094800f129) in parent group-v353718. [ 1171.010090] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Creating folder: Instances. Parent ref: group-v353719. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1171.010341] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c1bc86d-e7e8-480e-9096-1a71bc0fc4be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.018669] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Created folder: Instances in parent group-v353719. 
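The three Folder.CreateFolder calls recorded above build the vCenter folder hierarchy Nova places instances under: a top-level OpenStack folder, a per-tenant "Project (<project id>)" folder beneath it, and an Instances folder beneath that. The following is a minimal, illustrative sketch of that naming convention only; the helper name and the use of plain strings (rather than vCenter managed object references) are assumptions, not Nova's API.

```python
# Illustrative only: mirrors the folder names seen in the log records above
# (OpenStack -> Project (<project id>) -> Instances). The helper name and its
# return type are hypothetical, not nova.virt.vmwareapi code.
def instance_folder_path(project_id: str) -> list[str]:
    """Return the folder names created for a tenant, outermost first."""
    return ["OpenStack", f"Project ({project_id})", "Instances"]

# Example matching the records above:
# instance_folder_path("0eea8110fbb64e0486fc0d094800f129")
# -> ['OpenStack', 'Project (0eea8110fbb64e0486fc0d094800f129)', 'Instances']
```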
[ 1171.019231] env[63297]: DEBUG oslo.service.loopingcall [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1171.019231] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1171.019426] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-228d3c9b-2bc1-4ac1-8be6-6d40edb4c522 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.038619] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1171.038619] env[63297]: value = "task-1696775" [ 1171.038619] env[63297]: _type = "Task" [ 1171.038619] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.047563] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696775, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.189471] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.189983] env[63297]: DEBUG nova.compute.manager [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1171.192741] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.944s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.194794] env[63297]: INFO nova.compute.claims [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1171.488659] env[63297]: DEBUG nova.compute.manager [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1171.551804] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696775, 'name': CreateVM_Task, 'duration_secs': 0.317354} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.552730] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1171.556599] env[63297]: DEBUG oslo_vmware.service [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ca0287-2def-4efe-8e3f-d5e847624520 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.564278] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1171.564548] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.565147] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1171.565698] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec84d554-3d6f-4e54-88ab-939be02e34a3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.571362] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1171.571362] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5241165c-f5a5-d123-adab-98883f311ead" [ 1171.571362] env[63297]: _type = "Task" [ 1171.571362] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.581363] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5241165c-f5a5-d123-adab-98883f311ead, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.618174] env[63297]: DEBUG nova.network.neutron [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Successfully updated port: 76456429-f255-412c-910b-a21b9ee6408a {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1171.665448] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.666398] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.666723] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1171.699241] env[63297]: DEBUG nova.compute.utils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1171.701113] env[63297]: DEBUG nova.compute.manager [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Not allocating networking since 'none' was specified. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1172.014582] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.087897] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.088548] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1172.089264] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1172.089264] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.089612] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1172.089994] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41161044-1604-4268-8e97-e4bbe68123c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.109199] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1172.109410] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1172.110391] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b626c42a-f9e9-41e6-9664-a71895dcee6a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.119352] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d576d197-2a4e-480a-b80b-395da92eb66b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.120743] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Acquiring lock "refresh_cache-e5f198e8-2080-4e3e-8ad5-964b855d70ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1172.121028] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Acquired lock "refresh_cache-e5f198e8-2080-4e3e-8ad5-964b855d70ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.121465] env[63297]: DEBUG nova.network.neutron [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1172.129060] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1172.129060] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524a080e-cec5-3b2a-398d-1378f265964a" [ 1172.129060] env[63297]: _type = "Task" [ 1172.129060] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.136119] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524a080e-cec5-3b2a-398d-1378f265964a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.203699] env[63297]: DEBUG nova.compute.manager [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1172.378507] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33105ea5-92ac-4045-b1e0-f50b37cdf0c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.386684] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbbf483-23d7-4e53-8c56-58e71f8fd2e2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.420730] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e830f029-5267-41be-9eef-530c15a76631 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.429392] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc6aba6c-22c5-46a1-9a79-4f0b7687a84e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.446854] env[63297]: DEBUG nova.compute.provider_tree [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.647585] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Preparing fetch location {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1172.648142] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Creating directory with path [datastore1] vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1172.648142] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5b070ab-65cb-4832-8e00-53dc49a217b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.669166] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.669979] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Acquiring lock "8adfd26f-1012-4e52-9371-e9d3f654046c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.670214] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 
tempest-ImagesNegativeTestJSON-2109446866-project-member] Lock "8adfd26f-1012-4e52-9371-e9d3f654046c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1172.674288] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Created directory with path [datastore1] vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1172.674288] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Fetch image to [datastore1] vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f/tmp-sparse.vmdk {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1172.674288] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Downloading image file data 41f1ad71-37f2-4e86-a900-da4965eba44f to [datastore1] vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f/tmp-sparse.vmdk on the data store datastore1 {{(pid=63297) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1172.674288] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb9fed2-c83e-481a-9745-c583e87959cc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.683406] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b7e78b-f763-4c6d-b4f2-c18b5752880d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.699243] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522a1a59-8837-4d51-83e7-5a18910b72fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.738979] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75e5499-45f2-40ea-adcd-b8edfd2095e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.746087] env[63297]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-654f0cb2-3cd9-498c-a2a6-24e5e7814b8c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.769014] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Downloading image file data 41f1ad71-37f2-4e86-a900-da4965eba44f to the data store datastore1 {{(pid=63297) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1172.813361] env[63297]: DEBUG nova.network.neutron [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1172.849600] env[63297]: DEBUG oslo_vmware.rw_handles [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63297) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1172.952348] env[63297]: DEBUG nova.scheduler.client.report [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1173.114414] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "ef851d71-788d-42f8-a824-5d30a89e957b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.115495] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "ef851d71-788d-42f8-a824-5d30a89e957b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.176547] env[63297]: DEBUG nova.compute.manager [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1173.179808] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.240699] env[63297]: DEBUG nova.compute.manager [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1173.296035] env[63297]: DEBUG nova.virt.hardware [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1173.296035] env[63297]: DEBUG nova.virt.hardware [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1173.296035] env[63297]: DEBUG nova.virt.hardware [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1173.296035] env[63297]: DEBUG nova.virt.hardware [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1173.296301] env[63297]: DEBUG nova.virt.hardware [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1173.296301] env[63297]: DEBUG nova.virt.hardware [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1173.296301] env[63297]: DEBUG nova.virt.hardware [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 
tempest-ServerShowV257Test-356643135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1173.296440] env[63297]: DEBUG nova.virt.hardware [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1173.296620] env[63297]: DEBUG nova.virt.hardware [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1173.296820] env[63297]: DEBUG nova.virt.hardware [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1173.296996] env[63297]: DEBUG nova.virt.hardware [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1173.298769] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c489b737-5724-4934-bdee-a593842e34ba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.309022] env[63297]: DEBUG nova.network.neutron [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Updating instance_info_cache with network_info: [{"id": "76456429-f255-412c-910b-a21b9ee6408a", "address": "fa:16:3e:55:1f:b4", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76456429-f2", "ovs_interfaceid": "76456429-f255-412c-910b-a21b9ee6408a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.318741] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb84c472-e929-4ff6-9056-43dd1a9d02cc 
{{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.336737] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Instance VIF info [] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1173.342406] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Creating folder: Project (2f5ca7cfd4844ac88d69a0f5efe12375). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1173.347196] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08420573-6e8c-4195-87bc-2214ffa94fa2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.358713] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Created folder: Project (2f5ca7cfd4844ac88d69a0f5efe12375) in parent group-v353718. [ 1173.358918] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Creating folder: Instances. Parent ref: group-v353722. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1173.359289] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-759f7182-7dae-43f0-99c2-fec6c0bcb9ad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.370295] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Created folder: Instances in parent group-v353722. [ 1173.370670] env[63297]: DEBUG oslo.service.loopingcall [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1173.372848] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1173.373201] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28eb06c2-fa0d-483e-9d36-7e2557890082 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.396159] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1173.396159] env[63297]: value = "task-1696778" [ 1173.396159] env[63297]: _type = "Task" [ 1173.396159] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.408904] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696778, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.459055] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.266s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.459879] env[63297]: DEBUG nova.compute.manager [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1173.463660] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.877s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.466670] env[63297]: INFO nova.compute.claims [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1173.509652] env[63297]: DEBUG oslo_vmware.rw_handles [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Completed reading data from the image iterator. {{(pid=63297) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1173.510050] env[63297]: DEBUG oslo_vmware.rw_handles [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1173.618276] env[63297]: DEBUG nova.compute.manager [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1173.652641] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Downloaded image file data 41f1ad71-37f2-4e86-a900-da4965eba44f to vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f/tmp-sparse.vmdk on the data store datastore1 {{(pid=63297) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1173.657446] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Caching image {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1173.657446] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Copying Virtual Disk [datastore1] vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f/tmp-sparse.vmdk to [datastore1] vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1173.657446] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3edb8643-c9ac-4827-a67e-5875ab23425e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.666837] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1173.666837] env[63297]: value = "task-1696779" [ 1173.666837] env[63297]: _type = "Task" [ 1173.666837] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.679402] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696779, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.699704] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.782108] env[63297]: DEBUG nova.compute.manager [req-c58891e9-83af-4a76-a1b0-cbef634f7bae req-9bc6171c-f6c9-42c4-b901-f76ebfe80041 service nova] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Received event network-vif-plugged-76456429-f255-412c-910b-a21b9ee6408a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1173.782406] env[63297]: DEBUG oslo_concurrency.lockutils [req-c58891e9-83af-4a76-a1b0-cbef634f7bae req-9bc6171c-f6c9-42c4-b901-f76ebfe80041 service nova] Acquiring lock "e5f198e8-2080-4e3e-8ad5-964b855d70ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.782652] env[63297]: DEBUG oslo_concurrency.lockutils [req-c58891e9-83af-4a76-a1b0-cbef634f7bae req-9bc6171c-f6c9-42c4-b901-f76ebfe80041 service nova] Lock "e5f198e8-2080-4e3e-8ad5-964b855d70ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.782860] env[63297]: DEBUG oslo_concurrency.lockutils [req-c58891e9-83af-4a76-a1b0-cbef634f7bae req-9bc6171c-f6c9-42c4-b901-f76ebfe80041 service nova] Lock "e5f198e8-2080-4e3e-8ad5-964b855d70ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.783078] env[63297]: DEBUG nova.compute.manager [req-c58891e9-83af-4a76-a1b0-cbef634f7bae req-9bc6171c-f6c9-42c4-b901-f76ebfe80041 service nova] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] No waiting events found dispatching network-vif-plugged-76456429-f255-412c-910b-a21b9ee6408a {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1173.783277] env[63297]: WARNING nova.compute.manager [req-c58891e9-83af-4a76-a1b0-cbef634f7bae req-9bc6171c-f6c9-42c4-b901-f76ebfe80041 service nova] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Received unexpected event network-vif-plugged-76456429-f255-412c-910b-a21b9ee6408a for instance with vm_state building and task_state spawning. 
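Several of the records above show the same wait-for-task pattern: a CreateVM_Task or CopyVirtualDisk_Task is submitted, its progress is polled ("progress is 0%", "progress is 99%"), and completion is reported together with a duration_secs value. The sketch below shows that polling pattern in the abstract; it is not oslo.vmware's wait_for_task, and the poll_state callable, interval, and timeout are assumptions for illustration.

```python
import time

# Conceptual sketch of the polling loop visible in the log; not the
# oslo.vmware implementation. `poll_state` is a hypothetical callable
# returning (state, progress), e.g. ("running", 99) or ("success", 100).
def wait_for_task(poll_state, interval=0.5, timeout=300.0):
    start = time.monotonic()
    while True:
        state, progress = poll_state()
        print(f"progress is {progress}%")        # mirrors the DEBUG records above
        if state == "success":
            return time.monotonic() - start      # analogous to duration_secs
        if state == "error":
            raise RuntimeError("task failed")
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        time.sleep(interval)
```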
[ 1173.812322] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Releasing lock "refresh_cache-e5f198e8-2080-4e3e-8ad5-964b855d70ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1173.812650] env[63297]: DEBUG nova.compute.manager [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Instance network_info: |[{"id": "76456429-f255-412c-910b-a21b9ee6408a", "address": "fa:16:3e:55:1f:b4", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76456429-f2", "ovs_interfaceid": "76456429-f255-412c-910b-a21b9ee6408a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1173.813517] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:1f:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76456429-f255-412c-910b-a21b9ee6408a', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1173.829458] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Creating folder: Project (79e790f0304047138bab45773deee79d). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1173.829522] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d062ffd-6991-4cd2-a0c5-021fc14704b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.840557] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Created folder: Project (79e790f0304047138bab45773deee79d) in parent group-v353718. 
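The record above shows the neutron network_info entry for port 76456429-f255-412c-910b-a21b9ee6408a being reduced to the much smaller VIF info structure the VMware driver logs (bridge name, MAC address, an OpaqueNetwork reference to the NSX logical switch, the port id, and the vmxnet3 model). A hedged sketch of that mapping, using only the fields visible in the log, follows; the function name is illustrative and not Nova's.

```python
# Illustrative mapping from the neutron network_info entry shown above to the
# VIF info dict logged by build_virtual_machine. Field names follow the log;
# the helper itself is hypothetical, not nova.virt.vmwareapi code.
def vif_info_from_network_info(vif: dict) -> dict:
    details = vif.get("details", {})
    return {
        "network_name": vif["network"]["bridge"],      # e.g. 'br-int'
        "mac_address": vif["address"],                  # e.g. 'fa:16:3e:55:1f:b4'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                          # the neutron port id
        "vif_model": "vmxnet3",                         # matches the image metadata above
    }
```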
[ 1173.840557] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Creating folder: Instances. Parent ref: group-v353725. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1173.840557] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8228138-6f56-4644-96c0-c410024664da {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.852203] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Created folder: Instances in parent group-v353725. [ 1173.852203] env[63297]: DEBUG oslo.service.loopingcall [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1173.852203] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1173.852203] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f412e10-972e-4481-a089-c94051d2c680 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.877022] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1173.877022] env[63297]: value = "task-1696782" [ 1173.877022] env[63297]: _type = "Task" [ 1173.877022] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.885788] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696782, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.910946] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696778, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.978026] env[63297]: DEBUG nova.compute.utils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1173.982597] env[63297]: DEBUG nova.compute.manager [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1173.982912] env[63297]: DEBUG nova.network.neutron [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1174.071013] env[63297]: DEBUG nova.policy [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30253d72cb1f4a9faa9b616ad418d9e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1348601359224186bf59b12bfa5f1ef0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1174.154229] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.181529] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696779, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.201846] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "961c3a87-7f53-4764-b8a4-40a408a30f90" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.202035] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "961c3a87-7f53-4764-b8a4-40a408a30f90" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.389661] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696782, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.409134] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696778, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.490903] env[63297]: DEBUG nova.compute.manager [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1174.606782] env[63297]: DEBUG nova.network.neutron [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Successfully created port: c914211d-555e-4b13-b990-64105599d395 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1174.682699] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696779, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.714587} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.686755] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Copied Virtual Disk [datastore1] vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f/tmp-sparse.vmdk to [datastore1] vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1174.686974] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Deleting the datastore file [datastore1] vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f/tmp-sparse.vmdk {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1174.687521] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a42ed9f4-853a-4e02-bc80-8c48a8546334 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.695383] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1174.695383] env[63297]: value = "task-1696783" [ 1174.695383] env[63297]: _type = "Task" [ 1174.695383] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.709397] env[63297]: DEBUG nova.compute.manager [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1174.712778] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696783, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.806380] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4db99c-aeb7-42dd-855f-41e08114041f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.821137] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6155af9-0c1c-4118-b521-bc9def7bf3a0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.857215] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db965d39-1875-4aa2-bcb9-ddbc3774cfcf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.864919] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad82ba9-819c-4bdc-8b16-e8c14ad5d42b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.878168] env[63297]: DEBUG nova.compute.provider_tree [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1174.888381] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696782, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.909041] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696778, 'name': CreateVM_Task, 'duration_secs': 1.321912} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.909251] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1174.912504] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1174.912504] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.912504] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1174.912504] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-026c2a08-ae36-41ff-8137-46db051091e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.917725] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1174.917725] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522c0cef-5094-0f12-c0a2-056461187c1f" [ 1174.917725] env[63297]: _type = "Task" [ 1174.917725] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.926895] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522c0cef-5094-0f12-c0a2-056461187c1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.205622] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696783, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022447} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.205899] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1175.208726] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Moving file from [datastore1] vmware_temp/005767d1-f419-4183-aa88-629129644fbe/41f1ad71-37f2-4e86-a900-da4965eba44f to [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f. {{(pid=63297) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1175.209763] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-cd6239d4-a5d1-4437-9b6e-8b29588a9df7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.227468] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1175.227468] env[63297]: value = "task-1696784" [ 1175.227468] env[63297]: _type = "Task" [ 1175.227468] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.239792] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696784, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.254689] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.381636] env[63297]: DEBUG nova.scheduler.client.report [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1175.398227] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696782, 'name': CreateVM_Task, 'duration_secs': 1.278889} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.398402] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1175.413371] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1175.429075] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1175.429427] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1175.429661] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1175.429870] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.430303] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1175.430432] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f962505-42ef-4d57-b4c5-dadce8fad7fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.435178] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Waiting for the task: (returnval){ [ 1175.435178] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52034d0a-2d87-a474-75d1-27b2fc1947a8" [ 1175.435178] env[63297]: _type = "Task" [ 1175.435178] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.443079] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52034d0a-2d87-a474-75d1-27b2fc1947a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.500938] env[63297]: DEBUG nova.compute.manager [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1175.535818] env[63297]: DEBUG nova.virt.hardware [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1175.535818] env[63297]: DEBUG nova.virt.hardware [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1175.535990] env[63297]: DEBUG nova.virt.hardware [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1175.536103] env[63297]: DEBUG nova.virt.hardware [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1175.536256] env[63297]: DEBUG nova.virt.hardware [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1175.536446] env[63297]: DEBUG nova.virt.hardware [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1175.536763] env[63297]: DEBUG nova.virt.hardware [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1175.536810] env[63297]: DEBUG nova.virt.hardware [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1175.537017] env[63297]: DEBUG nova.virt.hardware [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1175.537553] env[63297]: DEBUG nova.virt.hardware [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1175.537956] env[63297]: DEBUG nova.virt.hardware [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1175.539171] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc06d6c5-a38b-4f9d-a4af-c6d96b89becf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.548535] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4b2a9e-c283-4233-9080-1829913adf50 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.739802] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696784, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.037469} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.740263] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] File moved {{(pid=63297) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1175.740523] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Cleaning up location [datastore1] vmware_temp/005767d1-f419-4183-aa88-629129644fbe {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1175.740710] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Deleting the datastore file [datastore1] vmware_temp/005767d1-f419-4183-aa88-629129644fbe {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1175.740947] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74f21463-a13a-45b5-b80d-6f4c8d9f2675 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.751281] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1175.751281] env[63297]: value = "task-1696785" [ 1175.751281] env[63297]: _type = "Task" [ 1175.751281] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.763891] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696785, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.889931] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.894889] env[63297]: DEBUG nova.compute.manager [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1175.894889] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.879s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.897024] env[63297]: INFO nova.compute.claims [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1175.951499] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52034d0a-2d87-a474-75d1-27b2fc1947a8, 'name': SearchDatastore_Task, 'duration_secs': 0.008268} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.952482] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1175.953063] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1175.953063] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1176.267498] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696785, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026437} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.267794] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1176.269375] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f735e9f-c01c-432c-808b-8af079ba5b57 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.275696] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1176.275696] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f170c2-8ed6-bddc-7175-bcbf21eaf0f6" [ 1176.275696] env[63297]: _type = "Task" [ 1176.275696] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.285023] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f170c2-8ed6-bddc-7175-bcbf21eaf0f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.404723] env[63297]: DEBUG nova.compute.utils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1176.407711] env[63297]: DEBUG nova.compute.manager [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1176.407784] env[63297]: DEBUG nova.network.neutron [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1176.542632] env[63297]: DEBUG nova.policy [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42c1eef518d7422c83cb75c14edc2e5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4701357dafc84216ae883b6e88b34d5d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1176.573967] env[63297]: DEBUG nova.network.neutron [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Successfully updated port: c914211d-555e-4b13-b990-64105599d395 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1176.679292] env[63297]: DEBUG nova.compute.manager [req-4d8daca8-82cd-4e05-ac16-14ec96f9ed10 req-30d5a2bb-7d8d-4455-9947-9bbe42f474cc service nova] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Received event network-changed-76456429-f255-412c-910b-a21b9ee6408a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1176.679621] env[63297]: DEBUG nova.compute.manager [req-4d8daca8-82cd-4e05-ac16-14ec96f9ed10 req-30d5a2bb-7d8d-4455-9947-9bbe42f474cc service nova] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Refreshing instance network info cache due to event network-changed-76456429-f255-412c-910b-a21b9ee6408a. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1176.679891] env[63297]: DEBUG oslo_concurrency.lockutils [req-4d8daca8-82cd-4e05-ac16-14ec96f9ed10 req-30d5a2bb-7d8d-4455-9947-9bbe42f474cc service nova] Acquiring lock "refresh_cache-e5f198e8-2080-4e3e-8ad5-964b855d70ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1176.679891] env[63297]: DEBUG oslo_concurrency.lockutils [req-4d8daca8-82cd-4e05-ac16-14ec96f9ed10 req-30d5a2bb-7d8d-4455-9947-9bbe42f474cc service nova] Acquired lock "refresh_cache-e5f198e8-2080-4e3e-8ad5-964b855d70ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.681682] env[63297]: DEBUG nova.network.neutron [req-4d8daca8-82cd-4e05-ac16-14ec96f9ed10 req-30d5a2bb-7d8d-4455-9947-9bbe42f474cc service nova] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Refreshing network info cache for port 76456429-f255-412c-910b-a21b9ee6408a {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1176.788800] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f170c2-8ed6-bddc-7175-bcbf21eaf0f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009828} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.789179] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1176.789464] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a/7f8849fb-c5d6-47a1-8079-08dfb2e0b85a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1176.789749] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.789925] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1176.790154] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bbbd5ccb-46ff-4e14-bd98-bb497e9c46cd {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.792893] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8597890f-312b-49ca-8b4b-7e99037cfb60 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.803906] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1176.803906] env[63297]: value = "task-1696786" [ 1176.803906] env[63297]: _type = "Task" [ 1176.803906] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.818526] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696786, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.823742] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1176.823921] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1176.824657] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9eca6608-e085-464a-87e2-c214ce4996ff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.831225] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1176.831225] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5239b933-fcf4-8011-b4bd-a950d005b17a" [ 1176.831225] env[63297]: _type = "Task" [ 1176.831225] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.841652] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5239b933-fcf4-8011-b4bd-a950d005b17a, 'name': SearchDatastore_Task, 'duration_secs': 0.007561} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.842451] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fcd7103-f9b4-4033-83a1-bad552c4b5e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.849977] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1176.849977] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522cc3bf-c359-fa46-4bf6-e23122c08224" [ 1176.849977] env[63297]: _type = "Task" [ 1176.849977] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.860654] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522cc3bf-c359-fa46-4bf6-e23122c08224, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.912234] env[63297]: DEBUG nova.compute.manager [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1177.078277] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "refresh_cache-ef3346b1-ce09-4616-bdf4-200ea31efd01" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1177.080247] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquired lock "refresh_cache-ef3346b1-ce09-4616-bdf4-200ea31efd01" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.080247] env[63297]: DEBUG nova.network.neutron [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1177.171393] env[63297]: DEBUG nova.network.neutron [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Successfully created port: de9a987f-1f14-4610-a5aa-6271a1dd49c1 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1177.255407] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9417743f-f8de-480e-9ac1-64410d392b47 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.267690] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6386cb-4ae1-4067-9a44-51f1411e2cce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.334501] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9400f822-e85b-4f9b-a46e-a32ba0644462 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.344475] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696786, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475679} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.346816] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a/7f8849fb-c5d6-47a1-8079-08dfb2e0b85a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1177.347052] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1177.347336] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07e07107-c2eb-4597-84c2-e50028618038 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.351409] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a03598b-c6d2-4713-bc7b-916412176747 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.362354] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1177.362354] env[63297]: value = "task-1696787" [ 1177.362354] env[63297]: _type = "Task" [ 1177.362354] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.371921] env[63297]: DEBUG nova.compute.provider_tree [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.388273] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522cc3bf-c359-fa46-4bf6-e23122c08224, 'name': SearchDatastore_Task, 'duration_secs': 0.008064} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.395170] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1177.395562] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] cce038d4-dc9a-4fae-8348-1c2f674b79e3/cce038d4-dc9a-4fae-8348-1c2f674b79e3.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1177.396249] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696787, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.396512] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.396709] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1177.396954] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b205b500-737a-4019-8d29-8b0f0de554fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.401491] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-879a4ce4-2842-4842-a3d8-6bebed7a25cc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.409566] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1177.409566] env[63297]: value = "task-1696788" [ 1177.409566] env[63297]: _type = "Task" [ 1177.409566] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.410981] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1177.411081] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1177.415102] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd25b0b7-e554-4501-a9ee-9a319671952e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.427232] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696788, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.428733] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Waiting for the task: (returnval){ [ 1177.428733] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527a80ab-700c-7c58-3849-9adf7d900ffb" [ 1177.428733] env[63297]: _type = "Task" [ 1177.428733] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.438138] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527a80ab-700c-7c58-3849-9adf7d900ffb, 'name': SearchDatastore_Task, 'duration_secs': 0.007411} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.439015] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b4a499a-fab7-454b-9bb1-b6d2cbde7b8d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.445104] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Waiting for the task: (returnval){ [ 1177.445104] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5215131a-a1c8-8fb0-50a1-3962d19e4007" [ 1177.445104] env[63297]: _type = "Task" [ 1177.445104] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.459243] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5215131a-a1c8-8fb0-50a1-3962d19e4007, 'name': SearchDatastore_Task, 'duration_secs': 0.008318} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.459605] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1177.459981] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] e5f198e8-2080-4e3e-8ad5-964b855d70ff/e5f198e8-2080-4e3e-8ad5-964b855d70ff.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1177.460294] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bbd412cf-7082-472a-9f72-8411d74a3c68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.469472] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Waiting for the task: (returnval){ [ 1177.469472] env[63297]: value = "task-1696789" [ 1177.469472] env[63297]: _type = "Task" [ 1177.469472] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.479130] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696789, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.681936] env[63297]: DEBUG nova.network.neutron [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1177.886191] env[63297]: DEBUG nova.scheduler.client.report [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1177.891109] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696787, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064197} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.892062] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1177.892510] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9f65d9-6241-45ca-8970-e16555266849 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.923314] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a/7f8849fb-c5d6-47a1-8079-08dfb2e0b85a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1177.929380] env[63297]: DEBUG nova.compute.manager [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1177.931011] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5019d41-e9a7-4c10-921d-8ca8d9370558 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.954309] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696788, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.431089} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.956982] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] cce038d4-dc9a-4fae-8348-1c2f674b79e3/cce038d4-dc9a-4fae-8348-1c2f674b79e3.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1177.957391] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1177.958199] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1177.958199] env[63297]: value = "task-1696790" [ 1177.958199] env[63297]: _type = "Task" [ 1177.958199] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.958402] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f8643bc1-0dd4-48ed-a73a-19ed9c4c17dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.985480] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1177.985480] env[63297]: value = "task-1696791" [ 1177.985480] env[63297]: _type = "Task" [ 1177.985480] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.997454] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696789, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.000406] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696790, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.000406] env[63297]: DEBUG nova.virt.hardware [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1178.000630] env[63297]: DEBUG nova.virt.hardware [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1178.000832] env[63297]: DEBUG nova.virt.hardware [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1178.000895] env[63297]: DEBUG nova.virt.hardware [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1178.001051] env[63297]: DEBUG nova.virt.hardware [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1178.001237] env[63297]: DEBUG nova.virt.hardware [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1178.005379] env[63297]: DEBUG nova.virt.hardware [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1178.005379] env[63297]: DEBUG nova.virt.hardware [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1178.005379] env[63297]: DEBUG nova.virt.hardware [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1178.005379] env[63297]: DEBUG nova.virt.hardware [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1178.005379] env[63297]: DEBUG nova.virt.hardware [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1178.008891] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0078d33b-d43d-416e-a424-88dd78fc650d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.017374] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696791, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.022617] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331ac279-a807-466d-a6af-a393687892ff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.138629] env[63297]: DEBUG nova.network.neutron [req-4d8daca8-82cd-4e05-ac16-14ec96f9ed10 req-30d5a2bb-7d8d-4455-9947-9bbe42f474cc service nova] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Updated VIF entry in instance network info cache for port 76456429-f255-412c-910b-a21b9ee6408a. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1178.138629] env[63297]: DEBUG nova.network.neutron [req-4d8daca8-82cd-4e05-ac16-14ec96f9ed10 req-30d5a2bb-7d8d-4455-9947-9bbe42f474cc service nova] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Updating instance_info_cache with network_info: [{"id": "76456429-f255-412c-910b-a21b9ee6408a", "address": "fa:16:3e:55:1f:b4", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76456429-f2", "ovs_interfaceid": "76456429-f255-412c-910b-a21b9ee6408a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.272382] env[63297]: DEBUG nova.network.neutron [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Updating instance_info_cache with network_info: [{"id": "c914211d-555e-4b13-b990-64105599d395", "address": "fa:16:3e:0a:ad:65", "network": {"id": "37468c24-d4a3-498d-9a46-9eb69e62b4b7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1496183778-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1348601359224186bf59b12bfa5f1ef0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc914211d-55", "ovs_interfaceid": "c914211d-555e-4b13-b990-64105599d395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.393689] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.393689] env[63297]: DEBUG nova.compute.manager [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1178.400603] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.221s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.400815] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.400966] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1178.401319] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.702s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.402831] env[63297]: INFO nova.compute.claims [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1178.407084] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba677a8-a5f5-443c-99f6-85f37d57d002 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.422841] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842938ca-42ef-409d-9d01-ee1fcf615811 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.445030] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723f6f26-8e23-4daf-9311-e80d8985552c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.452953] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425e60bb-4bef-4185-95aa-13af43718702 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.490419] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
free_ram=181418MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1178.490527] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.510414] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696791, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077336} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.510644] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696790, 'name': ReconfigVM_Task, 'duration_secs': 0.361044} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.510838] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696789, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.670255} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.511073] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1178.511412] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a/7f8849fb-c5d6-47a1-8079-08dfb2e0b85a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1178.512085] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] e5f198e8-2080-4e3e-8ad5-964b855d70ff/e5f198e8-2080-4e3e-8ad5-964b855d70ff.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1178.512322] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1178.513069] env[63297]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6930e7bd-4c7e-4a7f-9aa9-d635e11b784a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.515505] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ddea1661-0398-4fff-ba86-8857a59af88c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.517314] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46795e30-dffd-422c-8170-0000ef7546ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.537484] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] cce038d4-dc9a-4fae-8348-1c2f674b79e3/cce038d4-dc9a-4fae-8348-1c2f674b79e3.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1178.540262] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3ee07d8-ac14-432b-872c-091c0233c456 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.556057] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Waiting for the task: (returnval){ [ 1178.556057] env[63297]: value = "task-1696792" [ 1178.556057] env[63297]: _type = "Task" [ 1178.556057] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.556533] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1178.556533] env[63297]: value = "task-1696793" [ 1178.556533] env[63297]: _type = "Task" [ 1178.556533] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.565295] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1178.565295] env[63297]: value = "task-1696794" [ 1178.565295] env[63297]: _type = "Task" [ 1178.565295] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.575021] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696792, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.575291] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696793, 'name': Rename_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.581187] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696794, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.642577] env[63297]: DEBUG oslo_concurrency.lockutils [req-4d8daca8-82cd-4e05-ac16-14ec96f9ed10 req-30d5a2bb-7d8d-4455-9947-9bbe42f474cc service nova] Releasing lock "refresh_cache-e5f198e8-2080-4e3e-8ad5-964b855d70ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.773945] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Releasing lock "refresh_cache-ef3346b1-ce09-4616-bdf4-200ea31efd01" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.774291] env[63297]: DEBUG nova.compute.manager [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Instance network_info: |[{"id": "c914211d-555e-4b13-b990-64105599d395", "address": "fa:16:3e:0a:ad:65", "network": {"id": "37468c24-d4a3-498d-9a46-9eb69e62b4b7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1496183778-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1348601359224186bf59b12bfa5f1ef0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc914211d-55", "ovs_interfaceid": "c914211d-555e-4b13-b990-64105599d395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1178.774698] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:ad:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'52358fcc-0d9f-45dd-8c75-db533fd992c3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c914211d-555e-4b13-b990-64105599d395', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1178.786464] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Creating folder: Project (1348601359224186bf59b12bfa5f1ef0). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1178.787332] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6a92042-bdce-4103-b2ce-7415dfb6744c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.798817] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Created folder: Project (1348601359224186bf59b12bfa5f1ef0) in parent group-v353718. [ 1178.799258] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Creating folder: Instances. Parent ref: group-v353728. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1178.799736] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4ea4b72-9904-4ad7-ba7d-e0b66ce81a24 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.811034] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Created folder: Instances in parent group-v353728. [ 1178.811034] env[63297]: DEBUG oslo.service.loopingcall [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1178.811034] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1178.811034] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd65aa87-1818-4cff-a401-c651ff2f2896 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.840190] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1178.840190] env[63297]: value = "task-1696797" [ 1178.840190] env[63297]: _type = "Task" [ 1178.840190] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.849222] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696797, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.910906] env[63297]: DEBUG nova.compute.utils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1178.913903] env[63297]: DEBUG nova.compute.manager [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1178.913903] env[63297]: DEBUG nova.network.neutron [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1179.049474] env[63297]: DEBUG nova.policy [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '648e6fdbc1d5460883e4c876a3273d41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39986503166b4d44a424102c6c528225', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1179.076127] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696792, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065705} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.079765] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1179.080134] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696793, 'name': Rename_Task, 'duration_secs': 0.142334} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.081333] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d20519a-63cc-4c4a-b339-f540aeb243e0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.085441] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1179.088241] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37086668-928e-42cd-be5d-df9409e5f4a5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.089921] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696794, 'name': ReconfigVM_Task, 'duration_secs': 0.289246} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.090694] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Reconfigured VM instance instance-00000003 to attach disk [datastore1] cce038d4-dc9a-4fae-8348-1c2f674b79e3/cce038d4-dc9a-4fae-8348-1c2f674b79e3.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1179.092649] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee1b1abc-6bf5-4309-ac52-93f52f8d58e2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.118363] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] e5f198e8-2080-4e3e-8ad5-964b855d70ff/e5f198e8-2080-4e3e-8ad5-964b855d70ff.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1179.120515] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8da944e4-b950-4d96-9c68-520688ccd634 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.140519] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1179.140519] env[63297]: value = "task-1696798" [ 1179.140519] env[63297]: _type = "Task" [ 1179.140519] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.144405] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1179.144405] env[63297]: value = "task-1696799" [ 1179.144405] env[63297]: _type = "Task" [ 1179.144405] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.146118] env[63297]: DEBUG nova.compute.manager [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Received event network-vif-plugged-c914211d-555e-4b13-b990-64105599d395 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1179.146395] env[63297]: DEBUG oslo_concurrency.lockutils [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] Acquiring lock "ef3346b1-ce09-4616-bdf4-200ea31efd01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.146537] env[63297]: DEBUG oslo_concurrency.lockutils [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] Lock "ef3346b1-ce09-4616-bdf4-200ea31efd01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.147050] env[63297]: DEBUG oslo_concurrency.lockutils [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] Lock "ef3346b1-ce09-4616-bdf4-200ea31efd01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.147050] env[63297]: DEBUG nova.compute.manager [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] No waiting events found dispatching network-vif-plugged-c914211d-555e-4b13-b990-64105599d395 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1179.147050] env[63297]: WARNING nova.compute.manager [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Received unexpected event network-vif-plugged-c914211d-555e-4b13-b990-64105599d395 for instance with vm_state building and task_state spawning. [ 1179.147311] env[63297]: DEBUG nova.compute.manager [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Received event network-changed-c914211d-555e-4b13-b990-64105599d395 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1179.147854] env[63297]: DEBUG nova.compute.manager [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Refreshing instance network info cache due to event network-changed-c914211d-555e-4b13-b990-64105599d395. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1179.147854] env[63297]: DEBUG oslo_concurrency.lockutils [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] Acquiring lock "refresh_cache-ef3346b1-ce09-4616-bdf4-200ea31efd01" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.148204] env[63297]: DEBUG oslo_concurrency.lockutils [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] Acquired lock "refresh_cache-ef3346b1-ce09-4616-bdf4-200ea31efd01" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.148264] env[63297]: DEBUG nova.network.neutron [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Refreshing network info cache for port c914211d-555e-4b13-b990-64105599d395 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1179.165330] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Waiting for the task: (returnval){ [ 1179.165330] env[63297]: value = "task-1696800" [ 1179.165330] env[63297]: _type = "Task" [ 1179.165330] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.180812] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696798, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.187854] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696799, 'name': Rename_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.192601] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.355160] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696797, 'name': CreateVM_Task, 'duration_secs': 0.446621} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.355355] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1179.356765] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.357981] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.357981] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1179.358606] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-988651ac-8198-4c0b-9dfe-1965aeadad43 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.364302] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1179.364302] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dec8e3-4e8a-c7ee-84fd-fa5ee1d46f8d" [ 1179.364302] env[63297]: _type = "Task" [ 1179.364302] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.376692] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dec8e3-4e8a-c7ee-84fd-fa5ee1d46f8d, 'name': SearchDatastore_Task, 'duration_secs': 0.008806} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.377290] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1179.377489] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1179.377878] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.378076] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.378400] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1179.378878] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce1d054d-c0b2-41bb-8073-81743861ca17 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.387575] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1179.387763] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1179.388591] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e2f638e-4fd6-428a-8610-c4e49b0a9194 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.394846] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1179.394846] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52534a4d-77f2-4b98-37c2-241e94cd25d7" [ 1179.394846] env[63297]: _type = "Task" [ 1179.394846] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.404063] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52534a4d-77f2-4b98-37c2-241e94cd25d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.417602] env[63297]: DEBUG nova.compute.manager [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1179.592757] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccc978a-c897-4606-b059-807ad027a133 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.605791] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486ffce6-4181-4dbe-8d5e-c629268924b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.639596] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e12f72a-e080-4522-9447-69000d9fdfdc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.651293] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6827c0-c8ec-4460-ac7d-58fc9c16ea0c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.660686] env[63297]: DEBUG oslo_vmware.api [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696798, 'name': PowerOnVM_Task, 'duration_secs': 0.511467} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.661536] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1179.661961] env[63297]: INFO nova.compute.manager [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Took 8.78 seconds to spawn the instance on the hypervisor. [ 1179.664062] env[63297]: DEBUG nova.compute.manager [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1179.666206] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51bbfa13-9317-4510-b92d-76e738ecbdf5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.679850] env[63297]: DEBUG nova.compute.provider_tree [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1179.688349] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696799, 'name': Rename_Task, 'duration_secs': 0.188837} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.691421] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1179.697377] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a888022-6b3c-49e5-b29c-0ecac668c1af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.699938] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696800, 'name': ReconfigVM_Task, 'duration_secs': 0.372787} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.700408] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Reconfigured VM instance instance-00000001 to attach disk [datastore1] e5f198e8-2080-4e3e-8ad5-964b855d70ff/e5f198e8-2080-4e3e-8ad5-964b855d70ff.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1179.701363] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23bef8e0-d559-46c7-906d-12f03bee4016 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.705338] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1179.705338] env[63297]: value = "task-1696801" [ 1179.705338] env[63297]: _type = "Task" [ 1179.705338] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.710376] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Waiting for the task: (returnval){ [ 1179.710376] env[63297]: value = "task-1696802" [ 1179.710376] env[63297]: _type = "Task" [ 1179.710376] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.716476] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696801, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.724019] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696802, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.905218] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52534a4d-77f2-4b98-37c2-241e94cd25d7, 'name': SearchDatastore_Task, 'duration_secs': 0.008204} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.907051] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3b9edbb-1aa6-4839-ac83-4f6395dc6129 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.918681] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1179.918681] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]521dbfe0-9ff8-35ff-3501-af1a0aa77a27" [ 1179.918681] env[63297]: _type = "Task" [ 1179.918681] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.934245] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521dbfe0-9ff8-35ff-3501-af1a0aa77a27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.193032] env[63297]: DEBUG nova.scheduler.client.report [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1180.212997] env[63297]: INFO nova.compute.manager [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Took 14.36 seconds to build instance. [ 1180.228166] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696801, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.236411] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696802, 'name': Rename_Task, 'duration_secs': 0.247908} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.237298] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1180.237298] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e0ba249-0155-4aac-bcb0-4fc4dcc477d8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.244955] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Waiting for the task: (returnval){ [ 1180.244955] env[63297]: value = "task-1696803" [ 1180.244955] env[63297]: _type = "Task" [ 1180.244955] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.259376] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696803, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.317598] env[63297]: DEBUG nova.network.neutron [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Updated VIF entry in instance network info cache for port c914211d-555e-4b13-b990-64105599d395. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1180.317957] env[63297]: DEBUG nova.network.neutron [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Updating instance_info_cache with network_info: [{"id": "c914211d-555e-4b13-b990-64105599d395", "address": "fa:16:3e:0a:ad:65", "network": {"id": "37468c24-d4a3-498d-9a46-9eb69e62b4b7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1496183778-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1348601359224186bf59b12bfa5f1ef0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc914211d-55", "ovs_interfaceid": "c914211d-555e-4b13-b990-64105599d395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.344505] env[63297]: DEBUG nova.network.neutron [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Successfully updated port: de9a987f-1f14-4610-a5aa-6271a1dd49c1 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1180.432481] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521dbfe0-9ff8-35ff-3501-af1a0aa77a27, 'name': SearchDatastore_Task, 'duration_secs': 0.010408} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.432889] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1180.433419] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] ef3346b1-ce09-4616-bdf4-200ea31efd01/ef3346b1-ce09-4616-bdf4-200ea31efd01.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1180.433419] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7899b8b9-9a3e-4968-bf75-a554fc96ac20 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.439240] env[63297]: DEBUG nova.compute.manager [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1180.450348] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1180.450348] env[63297]: value = "task-1696804" [ 1180.450348] env[63297]: _type = "Task" [ 1180.450348] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.462186] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696804, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.492271] env[63297]: DEBUG nova.virt.hardware [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1180.494922] env[63297]: DEBUG nova.virt.hardware [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1180.495242] env[63297]: DEBUG nova.virt.hardware [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1180.495495] env[63297]: DEBUG nova.virt.hardware [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1180.495947] env[63297]: DEBUG nova.virt.hardware [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1180.498694] env[63297]: DEBUG nova.virt.hardware [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1180.500320] env[63297]: DEBUG nova.virt.hardware [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1180.500320] env[63297]: DEBUG nova.virt.hardware [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1180.500320] env[63297]: DEBUG nova.virt.hardware [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1180.500320] env[63297]: DEBUG nova.virt.hardware [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1180.500320] env[63297]: DEBUG nova.virt.hardware [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1180.500942] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60abccfb-5aff-4ec7-b6af-113ad2a179e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.511821] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c753ef-9b66-46d8-9114-728fc2140241 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.536503] env[63297]: DEBUG nova.network.neutron [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Successfully created port: fab4c7d4-707f-4b4d-81ff-d6796a6fd27a {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1180.697700] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.296s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.698320] env[63297]: DEBUG nova.compute.manager [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1180.704191] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.550s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.705903] env[63297]: INFO nova.compute.claims [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1180.723836] env[63297]: DEBUG oslo_concurrency.lockutils [None req-006b34ac-c29a-45b1-992c-cb45e7e5a650 tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Lock "7f8849fb-c5d6-47a1-8079-08dfb2e0b85a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.881s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.724103] env[63297]: DEBUG oslo_vmware.api [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696801, 'name': PowerOnVM_Task, 'duration_secs': 0.846887} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.724356] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1180.725281] env[63297]: INFO nova.compute.manager [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Took 7.48 seconds to spawn the instance on the hypervisor. [ 1180.725281] env[63297]: DEBUG nova.compute.manager [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1180.725802] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b7da09-ac00-4f25-af4d-5f94f04ad7af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.759690] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696803, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.820454] env[63297]: DEBUG oslo_concurrency.lockutils [req-3ade1fd9-76c9-4298-8b88-102106876b02 req-690fb4bc-b8ee-4021-a00d-e7c9f2cef8bd service nova] Releasing lock "refresh_cache-ef3346b1-ce09-4616-bdf4-200ea31efd01" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1180.847932] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "refresh_cache-3d66ef2c-ac35-4eae-a205-6dd80ee564d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1180.848110] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquired lock "refresh_cache-3d66ef2c-ac35-4eae-a205-6dd80ee564d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.848386] env[63297]: DEBUG nova.network.neutron [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1180.963986] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696804, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.211304] env[63297]: DEBUG nova.compute.utils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1181.213073] env[63297]: DEBUG nova.compute.manager [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1181.213598] env[63297]: DEBUG nova.network.neutron [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1181.266018] env[63297]: INFO nova.compute.manager [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Took 14.91 seconds to build instance. 
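The lockutils records above (for example, Lock "compute_resources" acquired ... waited 6.550s, "released" ... held 2.296s, and the Acquiring/Acquired/Releasing triplets around the "refresh_cache-<uuid>" locks) come from oslo.concurrency's named-lock helpers, which log how long each caller waited for and then held a lock. A minimal sketch of the same two usage patterns, assuming only the oslo.concurrency library; the function bodies and the instance-uuid parameter are illustrative, not taken from the log:

from oslo_concurrency import lockutils

# Decorator form: every caller serialises on the named lock, which is what
# produces the "waited N.NNNs" / "held N.NNNs" figures for "compute_resources".
@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # ... update the per-node resource tracker while holding the lock ...
    return instance_uuid

# Context-manager form: matches the Acquiring/Acquired/Releasing lines around
# the per-instance "refresh_cache-<uuid>" locks.
def refresh_network_cache(instance_uuid):
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        # ... rebuild the instance network info cache here ...
        pass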
[ 1181.278910] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696803, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.323864] env[63297]: DEBUG nova.policy [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34ca0a31ccec4323bcf598f8eddf7115', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '831de8a2aec2471e8eac0cf056f16265', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1181.441574] env[63297]: DEBUG nova.network.neutron [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1181.465956] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696804, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517788} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.466134] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] ef3346b1-ce09-4616-bdf4-200ea31efd01/ef3346b1-ce09-4616-bdf4-200ea31efd01.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1181.466403] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1181.466689] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13a745d2-98d9-4932-bd9f-87c547c29a2a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.474807] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1181.474807] env[63297]: value = "task-1696805" [ 1181.474807] env[63297]: _type = "Task" [ 1181.474807] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.488651] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696805, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.719329] env[63297]: DEBUG nova.compute.manager [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1181.767116] env[63297]: DEBUG oslo_vmware.api [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696803, 'name': PowerOnVM_Task, 'duration_secs': 1.227235} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.767116] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1181.767116] env[63297]: INFO nova.compute.manager [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Took 14.88 seconds to spawn the instance on the hypervisor. 
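The Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N% ... completed successfully sequences (such as task-1696803 going from 1% to 64% to done just above) are produced by oslo.vmware polling the vCenter task object until it reaches a terminal state. A simplified stand-in for that loop, written against pyVmomi rather than oslo.vmware; the poll interval and the power_on helper are assumptions for illustration:

import time

from pyVmomi import vim

def wait_for_task(task, poll_interval=0.5):
    # Poll a vSphere Task managed object until it finishes, mirroring the
    # "progress is N%" lines in the log above.
    while True:
        state = task.info.state
        if state == vim.TaskInfo.State.success:
            return task.info.result
        if state == vim.TaskInfo.State.error:
            # pyVmomi faults are Exception subclasses, so re-raise directly.
            raise task.info.error
        # 'queued' or 'running': progress may still be None early on.
        print("Task %s progress is %s%%" % (task.info.key, task.info.progress or 0))
        time.sleep(poll_interval)

def power_on(vm):
    # PowerOnVM_Task returns immediately with a Task object, exactly like the
    # VirtualMachine.PowerOnVM_Task invocations logged here.
    return wait_for_task(vm.PowerOnVM_Task())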
[ 1181.767116] env[63297]: DEBUG nova.compute.manager [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1181.771373] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4f9230ad-96ca-4ba7-80c4-d2b08f802527 tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lock "cce038d4-dc9a-4fae-8348-1c2f674b79e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.428s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.771438] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fe9af4-ce1e-41f0-b058-51329a058ba1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.827951] env[63297]: DEBUG nova.network.neutron [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Updating instance_info_cache with network_info: [{"id": "de9a987f-1f14-4610-a5aa-6271a1dd49c1", "address": "fa:16:3e:b1:04:8d", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde9a987f-1f", "ovs_interfaceid": "de9a987f-1f14-4610-a5aa-6271a1dd49c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.923551] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a56f1c-2d6f-4af3-a3ae-e30f3a22aa9c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.932853] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363fcca7-cd4a-4231-8a63-e9e2224949f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.968733] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6611b794-eb99-48ce-8114-b742e0d59aa8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.980640] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-29783a77-a6e7-4503-8c56-85738464117d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.985332] env[63297]: DEBUG nova.network.neutron [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Successfully created port: 0f8d8353-c946-4413-9b67-19c002e27040 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1181.993734] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696805, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108622} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.001548] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1182.001697] env[63297]: DEBUG nova.compute.provider_tree [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1182.006649] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b56eff0-18e7-4d4f-a2f1-0a65b7cc93bc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.035613] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] ef3346b1-ce09-4616-bdf4-200ea31efd01/ef3346b1-ce09-4616-bdf4-200ea31efd01.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1182.035927] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c85e3c97-5ddb-439e-8383-854112995b57 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.055853] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1182.055853] env[63297]: value = "task-1696806" [ 1182.055853] env[63297]: _type = "Task" [ 
1182.055853] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.064442] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696806, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.295793] env[63297]: INFO nova.compute.manager [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Took 19.61 seconds to build instance. [ 1182.331776] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Releasing lock "refresh_cache-3d66ef2c-ac35-4eae-a205-6dd80ee564d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.331776] env[63297]: DEBUG nova.compute.manager [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Instance network_info: |[{"id": "de9a987f-1f14-4610-a5aa-6271a1dd49c1", "address": "fa:16:3e:b1:04:8d", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde9a987f-1f", "ovs_interfaceid": "de9a987f-1f14-4610-a5aa-6271a1dd49c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1182.334400] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:04:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de9a987f-1f14-4610-a5aa-6271a1dd49c1', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1182.342672] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 
tempest-DeleteServersAdminTestJSON-1629689252-project-member] Creating folder: Project (4701357dafc84216ae883b6e88b34d5d). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1182.342672] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-348ee5f9-1983-4c00-98ad-69825ffa45d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.352467] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Created folder: Project (4701357dafc84216ae883b6e88b34d5d) in parent group-v353718. [ 1182.352665] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Creating folder: Instances. Parent ref: group-v353731. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1182.352928] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23e53baf-3c94-44da-a674-5ece9300d258 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.358547] env[63297]: DEBUG nova.compute.manager [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Received event network-vif-plugged-de9a987f-1f14-4610-a5aa-6271a1dd49c1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1182.358749] env[63297]: DEBUG oslo_concurrency.lockutils [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] Acquiring lock "3d66ef2c-ac35-4eae-a205-6dd80ee564d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.358948] env[63297]: DEBUG oslo_concurrency.lockutils [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] Lock "3d66ef2c-ac35-4eae-a205-6dd80ee564d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.359212] env[63297]: DEBUG oslo_concurrency.lockutils [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] Lock "3d66ef2c-ac35-4eae-a205-6dd80ee564d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.359321] env[63297]: DEBUG nova.compute.manager [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] No waiting events found dispatching network-vif-plugged-de9a987f-1f14-4610-a5aa-6271a1dd49c1 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1182.359476] env[63297]: WARNING nova.compute.manager [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Received 
unexpected event network-vif-plugged-de9a987f-1f14-4610-a5aa-6271a1dd49c1 for instance with vm_state building and task_state spawning. [ 1182.359629] env[63297]: DEBUG nova.compute.manager [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Received event network-changed-de9a987f-1f14-4610-a5aa-6271a1dd49c1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1182.359774] env[63297]: DEBUG nova.compute.manager [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Refreshing instance network info cache due to event network-changed-de9a987f-1f14-4610-a5aa-6271a1dd49c1. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1182.359955] env[63297]: DEBUG oslo_concurrency.lockutils [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] Acquiring lock "refresh_cache-3d66ef2c-ac35-4eae-a205-6dd80ee564d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1182.360084] env[63297]: DEBUG oslo_concurrency.lockutils [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] Acquired lock "refresh_cache-3d66ef2c-ac35-4eae-a205-6dd80ee564d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.360230] env[63297]: DEBUG nova.network.neutron [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Refreshing network info cache for port de9a987f-1f14-4610-a5aa-6271a1dd49c1 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1182.373481] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Created folder: Instances in parent group-v353731. [ 1182.373763] env[63297]: DEBUG oslo.service.loopingcall [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1182.373953] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1182.374451] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5856e9b-3329-4f54-b818-5ae6ebbfe83c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.402981] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1182.402981] env[63297]: value = "task-1696809" [ 1182.402981] env[63297]: _type = "Task" [ 1182.402981] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.414206] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696809, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.540689] env[63297]: ERROR nova.scheduler.client.report [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [req-0bbedf03-8076-4312-9ceb-cab3c3982663] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0bbedf03-8076-4312-9ceb-cab3c3982663"}]} [ 1182.569371] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696806, 'name': ReconfigVM_Task, 'duration_secs': 0.288239} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.570814] env[63297]: DEBUG nova.scheduler.client.report [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1182.576549] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Reconfigured VM instance instance-00000004 to attach disk [datastore1] ef3346b1-ce09-4616-bdf4-200ea31efd01/ef3346b1-ce09-4616-bdf4-200ea31efd01.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1182.577813] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c7ef213-0928-4987-8664-a5765b3fbda5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.589172] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1182.589172] env[63297]: value = "task-1696810" [ 1182.589172] env[63297]: _type = "Task" [ 1182.589172] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.599802] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696810, 'name': Rename_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.603314] env[63297]: DEBUG nova.scheduler.client.report [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1182.603314] env[63297]: DEBUG nova.compute.provider_tree [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1182.622874] env[63297]: DEBUG nova.scheduler.client.report [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1182.656211] env[63297]: DEBUG nova.scheduler.client.report [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1182.747932] env[63297]: DEBUG nova.compute.manager [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1182.784541] env[63297]: DEBUG nova.virt.hardware [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1182.784782] env[63297]: DEBUG nova.virt.hardware [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1182.784950] env[63297]: DEBUG nova.virt.hardware [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1182.785105] env[63297]: DEBUG nova.virt.hardware [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1182.785225] env[63297]: DEBUG nova.virt.hardware [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1182.785373] env[63297]: DEBUG nova.virt.hardware [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1182.786082] env[63297]: DEBUG nova.virt.hardware [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1182.786082] env[63297]: DEBUG nova.virt.hardware [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1182.786082] env[63297]: DEBUG 
nova.virt.hardware [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1182.786231] env[63297]: DEBUG nova.virt.hardware [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1182.786299] env[63297]: DEBUG nova.virt.hardware [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1182.787710] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c043aee8-b176-46b0-8644-88ae6746e956 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.796292] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806a8393-381e-49cc-a8cd-278feac9ab34 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.808322] env[63297]: DEBUG oslo_concurrency.lockutils [None req-202bd082-820e-469d-8c8b-0f35bf77f2a8 tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Lock "e5f198e8-2080-4e3e-8ad5-964b855d70ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.132s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.808605] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Acquiring lock "ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.808807] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Lock "ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.884356] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c28b5e3-aee5-4e72-98dc-e7eab8525d7e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.893749] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d3015a-8782-4c5b-81f5-3c0e94d997da {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.930664] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c239e69-bba1-4750-a3eb-8f40fc2ab39e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.941181] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696809, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.945032] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3405880c-d22f-4d46-967a-48ed26b360f6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.961995] env[63297]: DEBUG nova.compute.provider_tree [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1183.100659] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696810, 'name': Rename_Task, 'duration_secs': 0.150833} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.100944] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1183.101235] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca97276a-f316-410a-94a1-74d6967e93a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.109804] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1183.109804] env[63297]: value = "task-1696811" [ 1183.109804] env[63297]: _type = "Task" [ 1183.109804] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.118977] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696811, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.312255] env[63297]: DEBUG nova.compute.manager [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1183.324392] env[63297]: DEBUG nova.network.neutron [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Updated VIF entry in instance network info cache for port de9a987f-1f14-4610-a5aa-6271a1dd49c1. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1183.324392] env[63297]: DEBUG nova.network.neutron [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Updating instance_info_cache with network_info: [{"id": "de9a987f-1f14-4610-a5aa-6271a1dd49c1", "address": "fa:16:3e:b1:04:8d", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde9a987f-1f", "ovs_interfaceid": "de9a987f-1f14-4610-a5aa-6271a1dd49c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.441822] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696809, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.466086] env[63297]: DEBUG nova.scheduler.client.report [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1183.626497] env[63297]: DEBUG oslo_vmware.api [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696811, 'name': PowerOnVM_Task, 'duration_secs': 0.459466} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.626602] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1183.626743] env[63297]: INFO nova.compute.manager [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Took 8.13 seconds to spawn the instance on the hypervisor. [ 1183.629504] env[63297]: DEBUG nova.compute.manager [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1183.629504] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4562de-2c5d-43fd-81f0-db0904c9a986 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.826657] env[63297]: DEBUG oslo_concurrency.lockutils [req-387b3753-c81e-4480-81a2-97a3e60b5eed req-69b8f0ab-3939-496a-bc43-3535f51fa660 service nova] Releasing lock "refresh_cache-3d66ef2c-ac35-4eae-a205-6dd80ee564d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1183.849715] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1183.955222] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696809, 'name': CreateVM_Task, 'duration_secs': 1.363871} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.955222] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1183.955222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1183.955222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.955222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1183.955463] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfd031e6-19de-4fa3-97f5-f67c3837d823 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.963813] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1183.963813] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ca4b8c-10de-4155-35a9-ce5e7d9afd1d" [ 1183.963813] env[63297]: _type = "Task" [ 1183.963813] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.974698] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.270s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.975104] env[63297]: DEBUG nova.compute.manager [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1183.990014] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.732s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.992262] env[63297]: INFO nova.compute.claims [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1183.994924] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ca4b8c-10de-4155-35a9-ce5e7d9afd1d, 'name': SearchDatastore_Task, 'duration_secs': 0.011233} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.996415] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1183.997015] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1183.997015] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1183.997015] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.997147] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1183.997605] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56957817-cdd9-4023-9548-2a69cd951ca9 {{(pid=63297) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.006960] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1184.007191] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1184.008049] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5907851-287e-44b7-b054-ecb498af7a88 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.017274] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1184.017274] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526ae07d-9ed8-0821-672b-cd13af732d93" [ 1184.017274] env[63297]: _type = "Task" [ 1184.017274] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.025121] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526ae07d-9ed8-0821-672b-cd13af732d93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.148753] env[63297]: INFO nova.compute.manager [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Took 17.33 seconds to build instance. 
[ 1184.371987] env[63297]: DEBUG nova.network.neutron [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Successfully updated port: fab4c7d4-707f-4b4d-81ff-d6796a6fd27a {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1184.378627] env[63297]: DEBUG nova.compute.manager [None req-f5c2ce29-7160-4639-8402-487d37e87296 tempest-ServerDiagnosticsV248Test-1977235905 tempest-ServerDiagnosticsV248Test-1977235905-project-admin] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1184.380091] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57bd36db-6c82-404f-91e4-fc8c5cbc4500 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.391312] env[63297]: INFO nova.compute.manager [None req-f5c2ce29-7160-4639-8402-487d37e87296 tempest-ServerDiagnosticsV248Test-1977235905 tempest-ServerDiagnosticsV248Test-1977235905-project-admin] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Retrieving diagnostics [ 1184.392129] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee3980e-99f4-4dbc-8626-dcbd319c0749 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.472639] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "9b1306f9-4b0a-4116-8e79-271478f33490" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.473024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "9b1306f9-4b0a-4116-8e79-271478f33490" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.489341] env[63297]: DEBUG nova.compute.utils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1184.489341] env[63297]: DEBUG nova.compute.manager [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1184.489341] env[63297]: DEBUG nova.network.neutron [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1184.533739] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526ae07d-9ed8-0821-672b-cd13af732d93, 'name': SearchDatastore_Task, 'duration_secs': 0.010608} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.534665] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cf5b137-fd21-42d1-906e-3e97a254fabe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.541337] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1184.541337] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c64af0-1eac-e54f-164e-064dadbe75f2" [ 1184.541337] env[63297]: _type = "Task" [ 1184.541337] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.551836] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c64af0-1eac-e54f-164e-064dadbe75f2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.602561] env[63297]: DEBUG nova.policy [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c21dc2049dd84f87a3b7cbcd7ba0ebcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48bfb708de5c4dd287530be2f8483ca9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1184.650351] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d6b2008-94b5-4ebd-b1d3-db692318ac4f tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "ef3346b1-ce09-4616-bdf4-200ea31efd01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.848s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.707226] env[63297]: DEBUG nova.network.neutron [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Successfully updated port: 0f8d8353-c946-4413-9b67-19c002e27040 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1184.882248] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "refresh_cache-81920a24-f406-4923-98b7-cc0f3d0ccc8b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1184.882248] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquired lock "refresh_cache-81920a24-f406-4923-98b7-cc0f3d0ccc8b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.882248] env[63297]: DEBUG nova.network.neutron [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1184.982008] env[63297]: DEBUG nova.compute.manager [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1184.994062] env[63297]: DEBUG nova.compute.manager [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1185.053612] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c64af0-1eac-e54f-164e-064dadbe75f2, 'name': SearchDatastore_Task, 'duration_secs': 0.012671} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.056546] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1185.056823] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 3d66ef2c-ac35-4eae-a205-6dd80ee564d1/3d66ef2c-ac35-4eae-a205-6dd80ee564d1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1185.057453] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2272a627-04c1-4476-9c06-f13d0527c9ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.062169] env[63297]: DEBUG nova.network.neutron [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Successfully created port: 14298cd9-8999-4142-9f1f-7a512e4a09d1 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1185.065789] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1185.065789] env[63297]: value = "task-1696812" [ 1185.065789] env[63297]: _type = "Task" [ 1185.065789] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.076386] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696812, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.134216] env[63297]: INFO nova.compute.manager [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Rebuilding instance [ 1185.188025] env[63297]: DEBUG nova.compute.manager [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1185.188933] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e283c840-a080-453c-ad3d-ff8b769812c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.199726] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4a39ce-cfdc-4368-bf19-6f24c5417eff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.207970] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0044d0db-c8c0-4fc1-bca8-39fd33820c1b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.214031] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Acquiring lock "refresh_cache-8adfd26f-1012-4e52-9371-e9d3f654046c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1185.214125] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Acquired lock "refresh_cache-8adfd26f-1012-4e52-9371-e9d3f654046c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.214279] env[63297]: DEBUG nova.network.neutron [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1185.249377] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2160e6f-6bd6-4ae6-a6ba-f6d4cb70e462 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.262426] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff11d0d-9435-42d1-9fc1-7fe97cb691c5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.273965] env[63297]: DEBUG nova.compute.provider_tree [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1185.443748] env[63297]: DEBUG nova.network.neutron [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1185.514453] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.577713] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696812, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.701702] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1185.702193] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0788f58d-00de-4402-894b-0a1a82dc4bc7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.710897] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1185.710897] env[63297]: value = "task-1696813" [ 1185.710897] env[63297]: _type = "Task" [ 1185.710897] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.729486] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696813, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.777940] env[63297]: DEBUG nova.scheduler.client.report [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1185.814905] env[63297]: DEBUG nova.network.neutron [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1185.846430] env[63297]: DEBUG nova.network.neutron [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Updating instance_info_cache with network_info: [{"id": "fab4c7d4-707f-4b4d-81ff-d6796a6fd27a", "address": "fa:16:3e:a5:08:d2", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfab4c7d4-70", "ovs_interfaceid": "fab4c7d4-707f-4b4d-81ff-d6796a6fd27a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.006677] env[63297]: DEBUG nova.compute.manager [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1186.037406] env[63297]: DEBUG nova.virt.hardware [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1186.037837] env[63297]: DEBUG nova.virt.hardware [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1186.038090] env[63297]: DEBUG nova.virt.hardware [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1186.038326] env[63297]: DEBUG nova.virt.hardware [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1186.038533] env[63297]: DEBUG nova.virt.hardware [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1186.038720] env[63297]: DEBUG nova.virt.hardware [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1186.039453] env[63297]: DEBUG nova.virt.hardware [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1186.039453] env[63297]: DEBUG nova.virt.hardware [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1186.039453] env[63297]: DEBUG 
nova.virt.hardware [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1186.039743] env[63297]: DEBUG nova.virt.hardware [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1186.040109] env[63297]: DEBUG nova.virt.hardware [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1186.041303] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29bc2164-4f90-4347-9be9-ca95a68dc42e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.054325] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc1bb92-2ee8-4a70-ab52-fbdddb39952a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.083560] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696812, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.784496} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.083641] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 3d66ef2c-ac35-4eae-a205-6dd80ee564d1/3d66ef2c-ac35-4eae-a205-6dd80ee564d1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1186.083864] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1186.084139] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2dd7df51-7a12-4fbe-989a-1433c6f0053a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.091998] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1186.091998] env[63297]: value = "task-1696814" [ 1186.091998] env[63297]: _type = "Task" [ 1186.091998] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.101431] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696814, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.151830] env[63297]: DEBUG nova.network.neutron [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Updating instance_info_cache with network_info: [{"id": "0f8d8353-c946-4413-9b67-19c002e27040", "address": "fa:16:3e:a2:46:8e", "network": {"id": "9d1cf425-da29-4651-bfc6-aa6d8ea4f410", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-739463594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "831de8a2aec2471e8eac0cf056f16265", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f8d8353-c9", "ovs_interfaceid": "0f8d8353-c946-4413-9b67-19c002e27040", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.222873] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696813, 'name': PowerOffVM_Task, 'duration_secs': 0.167445} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.223246] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1186.223246] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1186.225084] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9ceb15-27c0-4877-a747-94024aace7a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.231831] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1186.232098] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a739b8a7-7ccc-4f5e-b3dc-a3a16bb3c849 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.259242] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1186.259697] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1186.259697] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Deleting the datastore file [datastore1] cce038d4-dc9a-4fae-8348-1c2f674b79e3 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1186.260042] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba4b7cf6-ab88-4371-8d07-2b250b487cae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.267794] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1186.267794] env[63297]: value = "task-1696816" [ 1186.267794] env[63297]: _type = "Task" [ 1186.267794] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.279478] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696816, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.288302] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.301s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.289066] env[63297]: DEBUG nova.compute.manager [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1186.293247] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.803s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.349236] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Releasing lock "refresh_cache-81920a24-f406-4923-98b7-cc0f3d0ccc8b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1186.349557] env[63297]: DEBUG nova.compute.manager [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Instance network_info: |[{"id": "fab4c7d4-707f-4b4d-81ff-d6796a6fd27a", "address": "fa:16:3e:a5:08:d2", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfab4c7d4-70", "ovs_interfaceid": "fab4c7d4-707f-4b4d-81ff-d6796a6fd27a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1186.350245] env[63297]: 
DEBUG nova.virt.vmwareapi.vmops [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:08:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fab4c7d4-707f-4b4d-81ff-d6796a6fd27a', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1186.358193] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Creating folder: Project (39986503166b4d44a424102c6c528225). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1186.359587] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3432ed14-1404-4e81-b713-d3bcd1519dbf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.370303] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Created folder: Project (39986503166b4d44a424102c6c528225) in parent group-v353718. [ 1186.370543] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Creating folder: Instances. Parent ref: group-v353734. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1186.370819] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25537a87-d4f7-4382-bfcb-1b3bb2659921 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.382719] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Created folder: Instances in parent group-v353734. [ 1186.382719] env[63297]: DEBUG oslo.service.loopingcall [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1186.382885] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1186.383106] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-949b0c90-90c4-4ccf-bf72-5f510d65f5ba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.409563] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1186.409563] env[63297]: value = "task-1696819" [ 1186.409563] env[63297]: _type = "Task" [ 1186.409563] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.419199] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696819, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.601065] env[63297]: DEBUG nova.compute.manager [req-7bb39871-0a48-453c-808b-a1d8046186f9 req-29faea22-645c-4fc1-afd9-2152baa61d76 service nova] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Received event network-vif-plugged-fab4c7d4-707f-4b4d-81ff-d6796a6fd27a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1186.601065] env[63297]: DEBUG oslo_concurrency.lockutils [req-7bb39871-0a48-453c-808b-a1d8046186f9 req-29faea22-645c-4fc1-afd9-2152baa61d76 service nova] Acquiring lock "81920a24-f406-4923-98b7-cc0f3d0ccc8b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.601658] env[63297]: DEBUG oslo_concurrency.lockutils [req-7bb39871-0a48-453c-808b-a1d8046186f9 req-29faea22-645c-4fc1-afd9-2152baa61d76 service nova] Lock "81920a24-f406-4923-98b7-cc0f3d0ccc8b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.601883] env[63297]: DEBUG oslo_concurrency.lockutils [req-7bb39871-0a48-453c-808b-a1d8046186f9 req-29faea22-645c-4fc1-afd9-2152baa61d76 service nova] Lock "81920a24-f406-4923-98b7-cc0f3d0ccc8b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.602150] env[63297]: DEBUG nova.compute.manager [req-7bb39871-0a48-453c-808b-a1d8046186f9 req-29faea22-645c-4fc1-afd9-2152baa61d76 service nova] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] No waiting events found dispatching network-vif-plugged-fab4c7d4-707f-4b4d-81ff-d6796a6fd27a {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1186.602338] env[63297]: WARNING nova.compute.manager [req-7bb39871-0a48-453c-808b-a1d8046186f9 req-29faea22-645c-4fc1-afd9-2152baa61d76 service nova] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Received unexpected event network-vif-plugged-fab4c7d4-707f-4b4d-81ff-d6796a6fd27a for instance with vm_state building and task_state spawning. 
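
The task-1696816 / task-1696819 entries above, and the "progress is N%" lines that follow, come from oslo.vmware's blocking task poller: a vSphere call that returns a Task managed object, followed by wait_for_task(), which re-reads the task every task_poll_interval seconds via _poll_task until it reaches a terminal state. A minimal client-side sketch of that same pattern is shown below; the vCenter host, credentials, and datastore path are placeholders, and this is an illustration of the library usage, not code taken from this deployment.

    # Sketch of the oslo.vmware task-wait pattern seen in this log.
    # Host, credentials, and the datastore path are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.com',            # vCenter host (placeholder)
        'administrator@vsphere.local',    # username (placeholder)
        'secret',                         # password (placeholder)
        api_retry_count=10,               # retries on transient API faults
        task_poll_interval=0.5)           # seconds between _poll_task polls

    # Pick an arbitrary Datacenter reference just to keep the example
    # self-contained.
    retrieve_result = session.invoke_api(vim_util, 'get_objects',
                                         session.vim, 'Datacenter', 1)
    dc_ref = retrieve_result.objects[0].obj

    # FileManager.DeleteDatastoreFile_Task returns a Task managed object,
    # like the DeleteDatastoreFile_Task / CreateVM_Task calls logged above.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore1] some/old/file.vmdk',
                              datacenter=dc_ref)

    # wait_for_task() blocks, logging "Task: {...} progress is N%" while the
    # task runs, then returns the task info on success or raises on error.
    session.wait_for_task(task)
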
[ 1186.608093] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696814, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07485} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.608384] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1186.609207] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398f948a-4a70-4f39-9392-5adcfd1e98dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.633420] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 3d66ef2c-ac35-4eae-a205-6dd80ee564d1/3d66ef2c-ac35-4eae-a205-6dd80ee564d1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1186.634169] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-369f1f0c-5573-4a32-bef5-a86685584752 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.654533] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Releasing lock "refresh_cache-8adfd26f-1012-4e52-9371-e9d3f654046c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1186.654859] env[63297]: DEBUG nova.compute.manager [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Instance network_info: |[{"id": "0f8d8353-c946-4413-9b67-19c002e27040", "address": "fa:16:3e:a2:46:8e", "network": {"id": "9d1cf425-da29-4651-bfc6-aa6d8ea4f410", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-739463594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "831de8a2aec2471e8eac0cf056f16265", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f8d8353-c9", "ovs_interfaceid": "0f8d8353-c946-4413-9b67-19c002e27040", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1186.656063] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:46:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '90c863af-25e3-4fc6-a125-8baa7540298c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f8d8353-c946-4413-9b67-19c002e27040', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1186.664716] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Creating folder: Project (831de8a2aec2471e8eac0cf056f16265). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1186.665399] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11c9a1ca-5b15-4dd6-8f6b-96a92ceb99d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.667669] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1186.667669] env[63297]: value = "task-1696820" [ 1186.667669] env[63297]: _type = "Task" [ 1186.667669] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.674552] env[63297]: DEBUG nova.compute.manager [req-fec6db4e-a2ca-4558-8972-62378818e043 req-292c8af7-ebfc-48d0-ad7c-fe69c23e576b service nova] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Received event network-vif-plugged-0f8d8353-c946-4413-9b67-19c002e27040 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1186.674552] env[63297]: DEBUG oslo_concurrency.lockutils [req-fec6db4e-a2ca-4558-8972-62378818e043 req-292c8af7-ebfc-48d0-ad7c-fe69c23e576b service nova] Acquiring lock "8adfd26f-1012-4e52-9371-e9d3f654046c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.674552] env[63297]: DEBUG oslo_concurrency.lockutils [req-fec6db4e-a2ca-4558-8972-62378818e043 req-292c8af7-ebfc-48d0-ad7c-fe69c23e576b service nova] Lock "8adfd26f-1012-4e52-9371-e9d3f654046c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.674552] env[63297]: DEBUG oslo_concurrency.lockutils [req-fec6db4e-a2ca-4558-8972-62378818e043 req-292c8af7-ebfc-48d0-ad7c-fe69c23e576b service nova] Lock "8adfd26f-1012-4e52-9371-e9d3f654046c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.674901] env[63297]: DEBUG nova.compute.manager [req-fec6db4e-a2ca-4558-8972-62378818e043 req-292c8af7-ebfc-48d0-ad7c-fe69c23e576b service nova] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] No waiting events found dispatching network-vif-plugged-0f8d8353-c946-4413-9b67-19c002e27040 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1186.675087] env[63297]: WARNING nova.compute.manager [req-fec6db4e-a2ca-4558-8972-62378818e043 req-292c8af7-ebfc-48d0-ad7c-fe69c23e576b service nova] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Received unexpected event network-vif-plugged-0f8d8353-c946-4413-9b67-19c002e27040 for instance with vm_state building and task_state spawning. [ 1186.678483] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Created folder: Project (831de8a2aec2471e8eac0cf056f16265) in parent group-v353718. [ 1186.682330] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Creating folder: Instances. Parent ref: group-v353737. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1186.689148] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2fcb2d7-997e-493d-8f8c-35c8b9672fba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.690633] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.699457] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Created folder: Instances in parent group-v353737. [ 1186.699757] env[63297]: DEBUG oslo.service.loopingcall [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1186.700043] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1186.700364] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4b1cb72-df3f-457a-be08-4e6968c0bf06 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.721379] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1186.721379] env[63297]: value = "task-1696823" [ 1186.721379] env[63297]: _type = "Task" [ 1186.721379] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.729844] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696823, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.779416] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242033} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.780178] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1186.780515] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1186.780875] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1186.800038] env[63297]: DEBUG nova.compute.utils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1186.815366] env[63297]: DEBUG nova.compute.manager [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1186.815366] env[63297]: DEBUG nova.network.neutron [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1186.923292] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696819, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.988036] env[63297]: DEBUG nova.policy [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13c8359a7e444cb685dddde06efa122c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'babfd205ed454924b0bceb1d03fcfdf2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1187.179429] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696820, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.234250] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696823, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.318699] env[63297]: DEBUG nova.compute.manager [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1187.328909] env[63297]: DEBUG nova.network.neutron [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Successfully updated port: 14298cd9-8999-4142-9f1f-7a512e4a09d1 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1187.358534] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance e5f198e8-2080-4e3e-8ad5-964b855d70ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.358691] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.358807] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance cce038d4-dc9a-4fae-8348-1c2f674b79e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.358929] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance ef3346b1-ce09-4616-bdf4-200ea31efd01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.359213] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 3d66ef2c-ac35-4eae-a205-6dd80ee564d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.359213] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 81920a24-f406-4923-98b7-cc0f3d0ccc8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.359326] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 8adfd26f-1012-4e52-9371-e9d3f654046c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.359629] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance ef851d71-788d-42f8-a824-5d30a89e957b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.359629] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 961c3a87-7f53-4764-b8a4-40a408a30f90 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.427190] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696819, 'name': CreateVM_Task, 'duration_secs': 0.583728} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.427366] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1187.428124] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.428260] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.429697] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1187.429697] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6db2f521-6102-4086-900c-443179f165e2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.433694] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] 
Waiting for the task: (returnval){ [ 1187.433694] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]521315d2-1d80-d88d-17d9-482d131d57c2" [ 1187.433694] env[63297]: _type = "Task" [ 1187.433694] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.442164] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521315d2-1d80-d88d-17d9-482d131d57c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.598833] env[63297]: DEBUG nova.network.neutron [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Successfully created port: 01a6609e-6d94-400f-8f01-fc67889fb600 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1187.679698] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696820, 'name': ReconfigVM_Task, 'duration_secs': 0.775027} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.680455] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 3d66ef2c-ac35-4eae-a205-6dd80ee564d1/3d66ef2c-ac35-4eae-a205-6dd80ee564d1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1187.680808] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a23dce88-a6ee-44dd-b1b4-7c670322a2de {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.690106] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1187.690106] env[63297]: value = "task-1696824" [ 1187.690106] env[63297]: _type = "Task" [ 1187.690106] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.699727] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696824, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.735304] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696823, 'name': CreateVM_Task, 'duration_secs': 0.673686} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.735383] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1187.736237] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.821642] env[63297]: DEBUG nova.virt.hardware [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1187.821905] env[63297]: DEBUG nova.virt.hardware [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1187.822037] env[63297]: DEBUG nova.virt.hardware [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1187.822220] env[63297]: DEBUG nova.virt.hardware [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1187.822354] env[63297]: DEBUG nova.virt.hardware [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1187.822678] env[63297]: DEBUG nova.virt.hardware [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1187.822760] env[63297]: DEBUG nova.virt.hardware [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1187.822853] env[63297]: DEBUG nova.virt.hardware [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1187.823059] env[63297]: DEBUG nova.virt.hardware [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1187.823398] env[63297]: DEBUG nova.virt.hardware [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1187.824010] env[63297]: DEBUG nova.virt.hardware [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1187.824569] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fe0b22-a69e-4322-b5db-bb1e67cb4625 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.834623] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.834861] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.834979] env[63297]: DEBUG nova.network.neutron [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1187.847294] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec9d93f-6c2b-48b5-83f5-66af531085f6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.865771] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance ef57101e-1d8a-4ad5-ad68-cad2dbea33d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1187.867993] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Instance VIF info [] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1187.874591] env[63297]: DEBUG oslo.service.loopingcall [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1187.875559] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1187.875824] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-384bd181-f4ae-4466-85a8-2c4d43ec1fa5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.898777] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1187.898777] env[63297]: value = "task-1696825" [ 1187.898777] env[63297]: _type = "Task" [ 1187.898777] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.907123] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696825, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.944415] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521315d2-1d80-d88d-17d9-482d131d57c2, 'name': SearchDatastore_Task, 'duration_secs': 0.015932} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.944734] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1187.944957] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1187.945194] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.945333] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.945501] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1187.945836] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.946164] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1187.946383] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-acff3dcc-715b-44b4-988f-df9d0516a0c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.948880] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-504f6cb9-1d9c-4689-a0b3-499e107ad5c0 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.954597] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Waiting for the task: (returnval){ [ 1187.954597] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5252630a-c770-172d-5e44-995a794a7ec5" [ 1187.954597] env[63297]: _type = "Task" [ 1187.954597] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.968620] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5252630a-c770-172d-5e44-995a794a7ec5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.973722] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1187.973722] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1187.975625] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c181673f-de3e-4147-a790-c6c3dd6083bc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.986717] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1187.986717] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5278ef2c-9ae4-8915-d11a-665f49161b7b" [ 1187.986717] env[63297]: _type = "Task" [ 1187.986717] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.004025] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5278ef2c-9ae4-8915-d11a-665f49161b7b, 'name': SearchDatastore_Task, 'duration_secs': 0.010368} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.004025] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-222105ee-7ec6-404c-b818-948b22c7493f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.011054] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1188.011054] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529b382e-888c-72a4-dec5-5e4fc501c4cd" [ 1188.011054] env[63297]: _type = "Task" [ 1188.011054] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.028865] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529b382e-888c-72a4-dec5-5e4fc501c4cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.197886] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696824, 'name': Rename_Task, 'duration_secs': 0.339068} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.198396] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1188.198805] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac207a3b-1393-4cc1-8b3e-0190ace778e7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.211018] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1188.211018] env[63297]: value = "task-1696826" [ 1188.211018] env[63297]: _type = "Task" [ 1188.211018] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.218780] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696826, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.340332] env[63297]: DEBUG nova.compute.manager [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1188.375622] env[63297]: DEBUG nova.virt.hardware [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1188.375858] env[63297]: DEBUG nova.virt.hardware [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1188.376100] env[63297]: DEBUG nova.virt.hardware [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1188.376313] env[63297]: DEBUG nova.virt.hardware [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1188.376456] env[63297]: DEBUG nova.virt.hardware [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1188.377243] env[63297]: DEBUG nova.virt.hardware [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1188.377598] env[63297]: DEBUG nova.virt.hardware [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1188.377869] env[63297]: DEBUG nova.virt.hardware [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1188.378216] env[63297]: DEBUG nova.virt.hardware [None 
req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1188.378385] env[63297]: DEBUG nova.virt.hardware [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1188.378695] env[63297]: DEBUG nova.virt.hardware [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1188.379492] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 9b1306f9-4b0a-4116-8e79-271478f33490 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1188.379788] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1188.379960] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1188.384626] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b9d41f-7d5f-41ac-b9cb-34614f0e5c0a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.395883] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22ed261-1194-4586-9ff3-b1170b806bfc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.402909] env[63297]: DEBUG nova.network.neutron [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Successfully created port: f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1188.428469] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696825, 'name': CreateVM_Task, 'duration_secs': 0.430185} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.428834] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1188.429276] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.438495] env[63297]: DEBUG nova.network.neutron [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1188.470734] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5252630a-c770-172d-5e44-995a794a7ec5, 'name': SearchDatastore_Task, 'duration_secs': 0.024142} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.471389] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.471705] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1188.472099] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.476414] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.476775] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1188.478282] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edf4a979-f7f0-40f5-a247-f249b0ed0f24 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.485610] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1188.485610] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dab764-b853-8293-bb16-3e1793eca39f" [ 1188.485610] env[63297]: _type = "Task" [ 1188.485610] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.499012] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dab764-b853-8293-bb16-3e1793eca39f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.527214] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529b382e-888c-72a4-dec5-5e4fc501c4cd, 'name': SearchDatastore_Task, 'duration_secs': 0.011608} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.527214] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.527945] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 81920a24-f406-4923-98b7-cc0f3d0ccc8b/81920a24-f406-4923-98b7-cc0f3d0ccc8b.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1188.528772] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.528772] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1188.528772] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3f28c63-c3d4-4458-a210-de40409160f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.533526] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79540b58-fa3d-4fe6-b7a0-e3f6c557563e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.542071] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1188.542071] env[63297]: value = "task-1696827" [ 1188.542071] env[63297]: _type = "Task" [ 1188.542071] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.543577] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1188.543577] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1188.547495] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f27fcbd-748b-4597-984e-92ca47c3f3fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.560874] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1696827, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.562544] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Waiting for the task: (returnval){ [ 1188.562544] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522b0585-0103-3b18-cdd2-9fb046c7175c" [ 1188.562544] env[63297]: _type = "Task" [ 1188.562544] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.570386] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522b0585-0103-3b18-cdd2-9fb046c7175c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.636874] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d536d4-d412-4398-901a-405c726de48f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.645362] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1eb8c5-d783-4d36-acdc-711602e01ef9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.679858] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2935dc3c-7521-47e0-9a1b-580902774771 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.687768] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea785c4c-b6f8-44ef-8032-8dbccf29bb14 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.701366] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1188.719197] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696826, 'name': PowerOnVM_Task} progress is 89%. 
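The inventory pushed to the provider tree above translates into schedulable capacity of (total - reserved) * allocation_ratio per resource class, i.e. roughly 192 VCPU, 196078 MB of RAM and 400 GB of disk for this node. A hand calculation from the logged numbers, not output of the service itself:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0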
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.887324] env[63297]: DEBUG nova.network.neutron [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updating instance_info_cache with network_info: [{"id": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "address": "fa:16:3e:d1:15:40", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14298cd9-89", "ovs_interfaceid": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.995235] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "b5d34058-fa3e-4806-97e5-638bbbffaeb8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.995552] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "b5d34058-fa3e-4806-97e5-638bbbffaeb8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.006783] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dab764-b853-8293-bb16-3e1793eca39f, 'name': SearchDatastore_Task, 'duration_secs': 0.010289} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.007139] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.007374] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1189.007579] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1189.054047] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1696827, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.073800] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522b0585-0103-3b18-cdd2-9fb046c7175c, 'name': SearchDatastore_Task, 'duration_secs': 0.011701} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.074432] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-601a6562-45bd-4047-abc4-ac72fe389d64 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.082798] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Waiting for the task: (returnval){ [ 1189.082798] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5204955d-9f62-7ccd-314c-affe89db4cd2" [ 1189.082798] env[63297]: _type = "Task" [ 1189.082798] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.096627] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5204955d-9f62-7ccd-314c-affe89db4cd2, 'name': SearchDatastore_Task} progress is 0%. 
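The recurring "Waiting for the task: (returnval){...}" / "progress is N%" / "completed successfully" triples are oslo.vmware polling a vCenter task object until it reaches a terminal state. A generic, stand-alone sketch of that polling loop; it does not call the real oslo_vmware API, and poll(), its return shape and the intervals are assumptions:

    import time

    def wait_for_task(poll, interval=0.5, timeout=300):
        # poll() is assumed to return a dict such as {"state": "running", "progress": 42},
        # {"state": "success"} or {"state": "error", "message": "..."}.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError(info.get("message", "task failed"))
            time.sleep(interval)  # a real client would log "progress is N%" here
        raise TimeoutError("task did not complete in time")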
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.227286] env[63297]: DEBUG oslo_vmware.api [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1696826, 'name': PowerOnVM_Task, 'duration_secs': 0.720153} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.227567] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1189.227772] env[63297]: INFO nova.compute.manager [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Took 11.30 seconds to spawn the instance on the hypervisor. [ 1189.227937] env[63297]: DEBUG nova.compute.manager [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1189.228746] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693203d6-e28a-446a-815b-dfa02122c84b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.232543] env[63297]: ERROR nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [req-59806628-1614-4fe7-8d2a-bc7a15c8a8b2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-59806628-1614-4fe7-8d2a-bc7a15c8a8b2"}]} [ 1189.244351] env[63297]: DEBUG nova.network.neutron [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Successfully created port: 881aea0b-28e5-4b91-af8d-d9c7c69b6446 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1189.259769] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1189.282166] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1189.282774] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1189.303341] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1189.326304] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1189.392919] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.392919] env[63297]: DEBUG nova.compute.manager [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 
tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Instance network_info: |[{"id": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "address": "fa:16:3e:d1:15:40", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14298cd9-89", "ovs_interfaceid": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1189.393705] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:15:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14298cd9-8999-4142-9f1f-7a512e4a09d1', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1189.403782] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Creating folder: Project (48bfb708de5c4dd287530be2f8483ca9). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1189.407829] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7c34af7-971e-4ab8-9455-c1173c910f3b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.420286] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Created folder: Project (48bfb708de5c4dd287530be2f8483ca9) in parent group-v353718. [ 1189.423357] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Creating folder: Instances. Parent ref: group-v353741. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1189.426134] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c550322c-1f9f-4cba-8caf-b7f1e4d2b702 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.436172] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Created folder: Instances in parent group-v353741. [ 1189.436367] env[63297]: DEBUG oslo.service.loopingcall [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1189.436719] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1189.436809] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5bcf404b-db0f-4d8b-a6a6-6faee1cc9ffd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.467573] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1189.467573] env[63297]: value = "task-1696830" [ 1189.467573] env[63297]: _type = "Task" [ 1189.467573] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.478955] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696830, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.498658] env[63297]: DEBUG nova.compute.manager [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1189.561753] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1696827, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.603774} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.562033] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 81920a24-f406-4923-98b7-cc0f3d0ccc8b/81920a24-f406-4923-98b7-cc0f3d0ccc8b.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1189.562827] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1189.563139] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e5b8a50-a726-4e28-9643-2f80a5fe8448 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.567195] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0284acb0-1596-4f40-8640-86ef1ec87a1a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.576682] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc5d375-ef37-4194-9d25-0bbe0e41d605 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.581761] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1189.581761] env[63297]: value = "task-1696831" [ 1189.581761] env[63297]: _type = "Task" [ 1189.581761] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.620680] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830ef88d-d043-426f-b5d9-cf1782df58be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.629791] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1696831, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.630079] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5204955d-9f62-7ccd-314c-affe89db4cd2, 'name': SearchDatastore_Task, 'duration_secs': 0.023647} completed successfully. 
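The copy/extend pair above rebuilds the instance's root disk from the image cache: the cached VMDK is copied into the instance directory and then grown to the flavor's root_gb. Below, the paths and extend size are reconstructed from the flavor and image metadata in this log; treating 1048576 as root_gb expressed in KB is an assumption that happens to match the logged value for a 1 GB root disk:

    datastore = "datastore1"
    image_id = "41f1ad71-37f2-4e86-a900-da4965eba44f"
    instance_uuid = "81920a24-f406-4923-98b7-cc0f3d0ccc8b"
    root_gb = 1  # m1.nano

    cache_vmdk = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    root_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    extend_size_kb = root_gb * 1024 * 1024  # -> 1048576

    print(cache_vmdk)   # source of CopyVirtualDisk_Task
    print(root_vmdk)    # destination, then target of ExtendVirtualDisk_Task
    print(extend_size_kb)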
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.630711] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.631111] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 8adfd26f-1012-4e52-9371-e9d3f654046c/8adfd26f-1012-4e52-9371-e9d3f654046c.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1189.631486] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.631653] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1189.631873] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4cab4f6e-49ff-4189-8e04-303d9bbe49f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.636858] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5715891-f48f-4673-aa0a-df702e8c4117 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.641421] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6fc2cd-451f-495d-b4b7-c0f0e7946c18 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.649596] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Waiting for the task: (returnval){ [ 1189.649596] env[63297]: value = "task-1696832" [ 1189.649596] env[63297]: _type = "Task" [ 1189.649596] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.666743] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1189.666743] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1189.667928] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1189.670489] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d39ee425-9d65-44f9-898f-3ec7c9431ca7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.675361] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696832, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.678913] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1189.678913] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5255a465-4250-441c-951e-4316555b9e1c" [ 1189.678913] env[63297]: _type = "Task" [ 1189.678913] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.688820] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5255a465-4250-441c-951e-4316555b9e1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.757902] env[63297]: INFO nova.compute.manager [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Took 19.19 seconds to build instance. [ 1189.978337] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696830, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.034047] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.080032] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquiring lock "13706c85-c23e-47cd-a7d8-2e902c11a7fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.080278] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lock "13706c85-c23e-47cd-a7d8-2e902c11a7fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.097116] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1696831, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067048} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.097514] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1190.098238] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aaba64e-bc98-42c2-b794-fafa985814d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.128186] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 81920a24-f406-4923-98b7-cc0f3d0ccc8b/81920a24-f406-4923-98b7-cc0f3d0ccc8b.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1190.128842] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-089bd183-133c-40b7-92e5-5d70ab6aa89d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.154484] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1190.154484] env[63297]: value = "task-1696833" [ 1190.154484] env[63297]: _type = "Task" [ 1190.154484] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.161170] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696832, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.166841] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1696833, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.190746] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5255a465-4250-441c-951e-4316555b9e1c, 'name': SearchDatastore_Task, 'duration_secs': 0.017899} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.194805] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de231892-13d8-4b5f-a9eb-2e4028c2a908 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.201627] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1190.201627] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52eec70f-f9d7-2f5b-de9a-138973c7493f" [ 1190.201627] env[63297]: _type = "Task" [ 1190.201627] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.210131] env[63297]: ERROR nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [req-f72c71cb-2fa1-4c74-b280-51f24cee210d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f72c71cb-2fa1-4c74-b280-51f24cee210d"}]} [ 1190.220998] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52eec70f-f9d7-2f5b-de9a-138973c7493f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.236480] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1190.260564] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1190.260785] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1190.268707] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2936eddd-c801-4ba4-a632-11084a95f8b4 tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "3d66ef2c-ac35-4eae-a205-6dd80ee564d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.712s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.284572] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1190.307007] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1190.491735] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696830, 'name': CreateVM_Task, 'duration_secs': 0.705906} completed successfully. 
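The 409 responses tagged placement.concurrent_update (req-59806628 and req-f72c71cb above) mean the PUT carried a stale resource provider generation; the report client reacts by refreshing inventories, aggregates and traits and retrying, which is the sequence of "Refreshing ..." lines following each error. A minimal sketch of that read-modify-retry pattern against the placement API, with endpoint, token and microversion as placeholders (this is not the nova report client):

    import requests

    def put_inventory(placement_url, token, rp_uuid, inventories, max_retries=3):
        headers = {"X-Auth-Token": token, "OpenStack-API-Version": "placement 1.26"}
        url = f"{placement_url}/resource_providers/{rp_uuid}/inventories"
        for _ in range(max_retries):
            # Re-read the provider's inventories to pick up the current generation.
            current = requests.get(url, headers=headers).json()
            body = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=body, headers=headers)
            if resp.status_code != 409:
                return resp
            # placement.concurrent_update: someone else bumped the generation; retry.
        raise RuntimeError("gave up after repeated generation conflicts")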
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.491735] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1190.492687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.492687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.492835] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1190.493136] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8716186-8a5d-43dc-a6b8-06c98f603585 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.508651] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1190.508651] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52736a34-78b9-d592-778e-3df2f3b7a467" [ 1190.508651] env[63297]: _type = "Task" [ 1190.508651] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.526966] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52736a34-78b9-d592-778e-3df2f3b7a467, 'name': SearchDatastore_Task, 'duration_secs': 0.011005} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.530672] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1190.530923] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1190.531302] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.578233] env[63297]: DEBUG nova.compute.manager [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Received event network-changed-fab4c7d4-707f-4b4d-81ff-d6796a6fd27a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1190.578233] env[63297]: DEBUG nova.compute.manager [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Refreshing instance network info cache due to event network-changed-fab4c7d4-707f-4b4d-81ff-d6796a6fd27a. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1190.578496] env[63297]: DEBUG oslo_concurrency.lockutils [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] Acquiring lock "refresh_cache-81920a24-f406-4923-98b7-cc0f3d0ccc8b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.578705] env[63297]: DEBUG oslo_concurrency.lockutils [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] Acquired lock "refresh_cache-81920a24-f406-4923-98b7-cc0f3d0ccc8b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.578875] env[63297]: DEBUG nova.network.neutron [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Refreshing network info cache for port fab4c7d4-707f-4b4d-81ff-d6796a6fd27a {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1190.583662] env[63297]: DEBUG nova.compute.manager [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Starting instance... 
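The Acquiring/Acquired/Releasing lock lines around "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" serialize concurrent builds that share the same cached image, so only one request populates the cache entry while the others wait. A small sketch of that pattern with oslo.concurrency, assuming in-process locking only; the "Acquired external semaphore" lines show nova additionally taking a file-based lock, which is omitted here, and the helper below is a placeholder, not nova's code:

    from oslo_concurrency import lockutils

    IMAGE_CACHE_LOCK = (
        "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f"
    )

    def refresh_image_cache():
        # Placeholder body: fetch/copy the image into the cache directory exactly once.
        pass

    # Every concurrent build of the same image funnels through this lock name.
    with lockutils.lock(IMAGE_CACHE_LOCK):
        refresh_image_cache()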
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1190.589043] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85ca9e8-d5d3-4f5d-bc2f-827b5d16037b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.598329] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d35d4d-e2ec-4833-b43f-c2562cec4b9d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.637659] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a1cf78-b80f-477c-9d79-66b9c14e29b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.647487] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5968bf64-e5f7-4906-8983-71bc58d2884f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.670021] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696832, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.670782] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1190.677348] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1696833, 'name': ReconfigVM_Task, 'duration_secs': 0.459594} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.679397] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 81920a24-f406-4923-98b7-cc0f3d0ccc8b/81920a24-f406-4923-98b7-cc0f3d0ccc8b.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1190.679397] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7f12658-d453-42eb-82e3-c4812c77859d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.687246] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1190.687246] env[63297]: value = "task-1696834" [ 1190.687246] env[63297]: _type = "Task" [ 1190.687246] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.696458] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1696834, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.711850] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52eec70f-f9d7-2f5b-de9a-138973c7493f, 'name': SearchDatastore_Task, 'duration_secs': 0.051084} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.712095] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1190.712363] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] cce038d4-dc9a-4fae-8348-1c2f674b79e3/cce038d4-dc9a-4fae-8348-1c2f674b79e3.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1190.713041] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.713041] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1190.713041] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cac937ab-f611-4f83-b741-e9fda5e6a03a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.715049] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3ed1fe5-a41e-494f-9506-2df989986350 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.725654] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1190.725654] env[63297]: value = "task-1696835" [ 1190.725654] env[63297]: _type = "Task" [ 1190.725654] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.733115] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1190.733321] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Folder [datastore1] devstack-image-cache_base created. 
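The lock/copy sequence above is the image-cache pattern: the cached VMDK under devstack-image-cache_base is locked, the target directory is created if missing, and CopyVirtualDisk_Task clones it into the instance's own folder. A rough sketch of that serialization, assuming a hypothetical copy_virtual_disk callable in place of the real vSphere call:

    from oslo_concurrency import lockutils

    def clone_from_image_cache(cache_vmdk, instance_vmdk, copy_virtual_disk):
        """Copy a cached image VMDK into an instance directory under a lock.

        copy_virtual_disk is a hypothetical stand-in for the real
        CopyVirtualDisk_Task issued through the vSphere session.
        """
        # Only one worker per host clones from this cache entry at a time;
        # others block here, matching the Acquiring/Acquired/Releasing
        # lock records above.
        with lockutils.lock(cache_vmdk):
            copy_virtual_disk(cache_vmdk, instance_vmdk)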
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1190.734198] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696835, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.734279] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2036277f-e44d-4eaa-aa67-d3cbd8926815 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.744023] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1190.744023] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52db5b7e-20d3-b5e9-1527-c739435785ea" [ 1190.744023] env[63297]: _type = "Task" [ 1190.744023] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.749629] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52db5b7e-20d3-b5e9-1527-c739435785ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.810406] env[63297]: DEBUG nova.compute.manager [req-b0f1ebc5-92d5-402a-b1ff-ed7a1eee1d74 req-608c3cad-1d63-44e9-b986-27ff085e256e service nova] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Received event network-changed-0f8d8353-c946-4413-9b67-19c002e27040 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1190.810906] env[63297]: DEBUG nova.compute.manager [req-b0f1ebc5-92d5-402a-b1ff-ed7a1eee1d74 req-608c3cad-1d63-44e9-b986-27ff085e256e service nova] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Refreshing instance network info cache due to event network-changed-0f8d8353-c946-4413-9b67-19c002e27040. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1190.810906] env[63297]: DEBUG oslo_concurrency.lockutils [req-b0f1ebc5-92d5-402a-b1ff-ed7a1eee1d74 req-608c3cad-1d63-44e9-b986-27ff085e256e service nova] Acquiring lock "refresh_cache-8adfd26f-1012-4e52-9371-e9d3f654046c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.810906] env[63297]: DEBUG oslo_concurrency.lockutils [req-b0f1ebc5-92d5-402a-b1ff-ed7a1eee1d74 req-608c3cad-1d63-44e9-b986-27ff085e256e service nova] Acquired lock "refresh_cache-8adfd26f-1012-4e52-9371-e9d3f654046c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.810906] env[63297]: DEBUG nova.network.neutron [req-b0f1ebc5-92d5-402a-b1ff-ed7a1eee1d74 req-608c3cad-1d63-44e9-b986-27ff085e256e service nova] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Refreshing network info cache for port 0f8d8353-c946-4413-9b67-19c002e27040 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1191.110142] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1191.163359] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696832, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.197983] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1696834, 'name': Rename_Task, 'duration_secs': 0.272717} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.199208] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1191.199208] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35c71728-dcdb-4664-81f3-3dc6b01f6d56 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.208913] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1191.208913] env[63297]: value = "task-1696836" [ 1191.208913] env[63297]: _type = "Task" [ 1191.208913] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.215999] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 17 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1191.216318] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 17 to 18 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1191.216717] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1191.233035] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1696836, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.243952] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696835, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.253670] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52db5b7e-20d3-b5e9-1527-c739435785ea, 'name': SearchDatastore_Task, 'duration_secs': 0.010731} completed successfully. 
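The set_inventory_for_provider records above amount to one full-inventory update for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 against the Placement API, after which the provider generation moves from 17 to 18. A hedged sketch of what that request body looks like, built from the values in the log (the endpoint path and generation handling reflect my reading of the Placement API, not anything captured in this log):

    import json

    provider = "88960333-a089-4255-ad72-5c02d57b2b35"
    body = {
        # Generation the caller last saw; Placement rejects the update
        # with a conflict if another writer bumped it in the meantime.
        "resource_provider_generation": 17,
        "inventories": {
            "VCPU":      {"total": 48, "reserved": 0, "min_unit": 1,
                          "max_unit": 16, "step_size": 1,
                          "allocation_ratio": 4.0},
            "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                          "max_unit": 65530, "step_size": 1,
                          "allocation_ratio": 1.0},
            "DISK_GB":   {"total": 400, "reserved": 0, "min_unit": 1,
                          "max_unit": 181, "step_size": 1,
                          "allocation_ratio": 1.0},
        },
    }
    # Sent as PUT /resource_providers/{uuid}/inventories with this JSON body.
    print(json.dumps({"url": f"/resource_providers/{provider}/inventories",
                      "json": body}, indent=2))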
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.255215] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebc75d55-c000-4a99-9d65-8f9289106856 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.261726] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1191.261726] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]525ab06b-db8f-384d-1bec-3b234c6918bc" [ 1191.261726] env[63297]: _type = "Task" [ 1191.261726] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.271808] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525ab06b-db8f-384d-1bec-3b234c6918bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.675138] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696832, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.826997} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.675138] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 8adfd26f-1012-4e52-9371-e9d3f654046c/8adfd26f-1012-4e52-9371-e9d3f654046c.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1191.675138] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1191.675609] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68be2a2b-4b09-4a20-9230-b479d2adecba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.683337] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Waiting for the task: (returnval){ [ 1191.683337] env[63297]: value = "task-1696837" [ 1191.683337] env[63297]: _type = "Task" [ 1191.683337] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.694305] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696837, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.717574] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1696836, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.739589] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1191.739589] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.445s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.742476] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.893s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.747721] env[63297]: INFO nova.compute.claims [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1191.763828] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696835, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.777508] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525ab06b-db8f-384d-1bec-3b234c6918bc, 'name': SearchDatastore_Task, 'duration_secs': 0.029141} completed successfully. 
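Given that inventory, the claim for ef57101e-1d8a-4ad5-ad68-cad2dbea33d1 above succeeds against capacities derived as (total - reserved) * allocation_ratio, which is how placement computes usable capacity; worked out with the logged numbers:

    # Usable capacity = (total - reserved) * allocation_ratio
    vcpu      = (48     - 0)   * 4.0   # -> 192 schedulable VCPUs
    memory_mb = (196590 - 512) * 1.0   # -> 196078 MB
    disk_gb   = (400    - 0)   * 1.0   # -> 400 GB
    assert (vcpu, memory_mb, disk_gb) == (192.0, 196078.0, 400.0)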
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.777807] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.778837] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] ef851d71-788d-42f8-a824-5d30a89e957b/ef851d71-788d-42f8-a824-5d30a89e957b.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1191.778837] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33515de5-91a4-4bd3-abcc-218d7ea5349f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.785541] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1191.785541] env[63297]: value = "task-1696838" [ 1191.785541] env[63297]: _type = "Task" [ 1191.785541] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.795585] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696838, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.871999] env[63297]: DEBUG nova.network.neutron [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Successfully updated port: 01a6609e-6d94-400f-8f01-fc67889fb600 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1191.957619] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Acquiring lock "6d290634-67e7-4fb4-9a88-3da6eca34d4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1191.957619] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Lock "6d290634-67e7-4fb4-9a88-3da6eca34d4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.078646] env[63297]: DEBUG nova.network.neutron [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Updated VIF entry in instance network info cache for port fab4c7d4-707f-4b4d-81ff-d6796a6fd27a. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1192.079033] env[63297]: DEBUG nova.network.neutron [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Updating instance_info_cache with network_info: [{"id": "fab4c7d4-707f-4b4d-81ff-d6796a6fd27a", "address": "fa:16:3e:a5:08:d2", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfab4c7d4-70", "ovs_interfaceid": "fab4c7d4-707f-4b4d-81ff-d6796a6fd27a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.116171] env[63297]: DEBUG nova.network.neutron [req-b0f1ebc5-92d5-402a-b1ff-ed7a1eee1d74 req-608c3cad-1d63-44e9-b986-27ff085e256e service nova] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Updated VIF entry in instance 
network info cache for port 0f8d8353-c946-4413-9b67-19c002e27040. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1192.116171] env[63297]: DEBUG nova.network.neutron [req-b0f1ebc5-92d5-402a-b1ff-ed7a1eee1d74 req-608c3cad-1d63-44e9-b986-27ff085e256e service nova] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Updating instance_info_cache with network_info: [{"id": "0f8d8353-c946-4413-9b67-19c002e27040", "address": "fa:16:3e:a2:46:8e", "network": {"id": "9d1cf425-da29-4651-bfc6-aa6d8ea4f410", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-739463594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "831de8a2aec2471e8eac0cf056f16265", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f8d8353-c9", "ovs_interfaceid": "0f8d8353-c946-4413-9b67-19c002e27040", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.198398] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696837, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096779} completed successfully. 
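The instance_info_cache entries logged above are the serialized network_info model: a list of VIF dicts, each carrying the port id, MAC address, and a nested network/subnets/ips structure. A small sketch of pulling the fixed IPs out of one such entry, using a trimmed copy of the values already shown for port 0f8d8353-c946-4413-9b67-19c002e27040:

    # Trimmed copy of the VIF entry logged above, keeping only the fields
    # this example reads.
    vif = {
        "id": "0f8d8353-c946-4413-9b67-19c002e27040",
        "address": "fa:16:3e:a2:46:8e",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.11", "type": "fixed"}],
            }],
        },
    }

    def fixed_ips(vif):
        """Collect the fixed IP addresses from a network_info VIF dict."""
        return [ip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                if ip["type"] == "fixed"]

    assert fixed_ips(vif) == ["192.168.128.11"]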
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.198398] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1192.202677] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57602e10-97a7-4dc2-a4c6-a48d5e2d8e56 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.226459] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 8adfd26f-1012-4e52-9371-e9d3f654046c/8adfd26f-1012-4e52-9371-e9d3f654046c.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1192.229282] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb1f25a6-586a-402f-9e33-b1ed98cadd27 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.261025] env[63297]: DEBUG oslo_vmware.api [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1696836, 'name': PowerOnVM_Task, 'duration_secs': 0.775565} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.262598] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1192.262598] env[63297]: INFO nova.compute.manager [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Took 11.82 seconds to spawn the instance on the hypervisor. [ 1192.262763] env[63297]: DEBUG nova.compute.manager [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1192.263382] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Waiting for the task: (returnval){ [ 1192.263382] env[63297]: value = "task-1696839" [ 1192.263382] env[63297]: _type = "Task" [ 1192.263382] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.264330] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43beefab-21dd-4973-aeb0-5e8a8703082b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.275817] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696835, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.199891} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.278147] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] cce038d4-dc9a-4fae-8348-1c2f674b79e3/cce038d4-dc9a-4fae-8348-1c2f674b79e3.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1192.278147] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1192.278565] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22453ed4-2856-463b-baba-b1d4d6e987e4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.288477] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696839, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.297248] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1192.297248] env[63297]: value = "task-1696840" [ 1192.297248] env[63297]: _type = "Task" [ 1192.297248] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.303711] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696838, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.312727] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696840, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.464730] env[63297]: DEBUG nova.compute.manager [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1192.556976] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Acquiring lock "35c68986-51b5-43ba-a076-aca3c86d68bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.557228] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Lock "35c68986-51b5-43ba-a076-aca3c86d68bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.582761] env[63297]: DEBUG oslo_concurrency.lockutils [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] Releasing lock "refresh_cache-81920a24-f406-4923-98b7-cc0f3d0ccc8b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.583817] env[63297]: DEBUG nova.compute.manager [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Received event network-vif-plugged-14298cd9-8999-4142-9f1f-7a512e4a09d1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1192.583817] env[63297]: DEBUG oslo_concurrency.lockutils [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] Acquiring lock "ef851d71-788d-42f8-a824-5d30a89e957b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.583817] env[63297]: DEBUG oslo_concurrency.lockutils [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] Lock "ef851d71-788d-42f8-a824-5d30a89e957b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.584155] env[63297]: DEBUG oslo_concurrency.lockutils [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] Lock "ef851d71-788d-42f8-a824-5d30a89e957b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.584295] env[63297]: DEBUG nova.compute.manager [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] No 
waiting events found dispatching network-vif-plugged-14298cd9-8999-4142-9f1f-7a512e4a09d1 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1192.588551] env[63297]: WARNING nova.compute.manager [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Received unexpected event network-vif-plugged-14298cd9-8999-4142-9f1f-7a512e4a09d1 for instance with vm_state building and task_state spawning. [ 1192.588823] env[63297]: DEBUG nova.compute.manager [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Received event network-changed-14298cd9-8999-4142-9f1f-7a512e4a09d1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1192.589029] env[63297]: DEBUG nova.compute.manager [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Refreshing instance network info cache due to event network-changed-14298cd9-8999-4142-9f1f-7a512e4a09d1. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1192.589321] env[63297]: DEBUG oslo_concurrency.lockutils [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] Acquiring lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.589570] env[63297]: DEBUG oslo_concurrency.lockutils [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] Acquired lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.589835] env[63297]: DEBUG nova.network.neutron [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Refreshing network info cache for port 14298cd9-8999-4142-9f1f-7a512e4a09d1 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1192.618858] env[63297]: DEBUG oslo_concurrency.lockutils [req-b0f1ebc5-92d5-402a-b1ff-ed7a1eee1d74 req-608c3cad-1d63-44e9-b986-27ff085e256e service nova] Releasing lock "refresh_cache-8adfd26f-1012-4e52-9371-e9d3f654046c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.746812] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.747666] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.747666] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1192.747666] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] 
Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1192.789904] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696839, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.801975] env[63297]: INFO nova.compute.manager [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Took 20.81 seconds to build instance. [ 1192.815379] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696838, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.965772} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.818415] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] ef851d71-788d-42f8-a824-5d30a89e957b/ef851d71-788d-42f8-a824-5d30a89e957b.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1192.818673] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1192.818929] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696840, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.229739} completed successfully. 
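The network-vif-plugged / network-changed traffic in the records above arrives through Nova's external events API: Neutron posts one event per port, and the compute manager either wakes a waiter or, as the WARNING for ef851d71-788d-42f8-a824-5d30a89e957b shows, logs it as unexpected when the instance is still building and nothing is waiting yet. A hedged sketch of the kind of payload Neutron sends (the exact shape is my reading of the os-server-external-events API, not captured in this log):

    # Shape of a single external event as Neutron would POST it to
    # /os-server-external-events (identifiers taken from the records above).
    event = {
        "name": "network-vif-plugged",
        "server_uuid": "ef851d71-788d-42f8-a824-5d30a89e957b",
        "tag": "14298cd9-8999-4142-9f1f-7a512e4a09d1",  # the Neutron port id
        "status": "completed",
    }
    payload = {"events": [event]}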
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.819813] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81ff70e2-eb9a-410c-b016-59fed1e46ea9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.822863] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1192.824590] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ff3fd5-2b04-4982-b5ea-67836542650a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.832883] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1192.832883] env[63297]: value = "task-1696841" [ 1192.832883] env[63297]: _type = "Task" [ 1192.832883] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.853563] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] cce038d4-dc9a-4fae-8348-1c2f674b79e3/cce038d4-dc9a-4fae-8348-1c2f674b79e3.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1192.857170] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Acquiring lock "3d66ef2c-ac35-4eae-a205-6dd80ee564d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.857413] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Lock "3d66ef2c-ac35-4eae-a205-6dd80ee564d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.857612] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Acquiring lock "3d66ef2c-ac35-4eae-a205-6dd80ee564d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.857798] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 
tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Lock "3d66ef2c-ac35-4eae-a205-6dd80ee564d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.857961] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Lock "3d66ef2c-ac35-4eae-a205-6dd80ee564d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.859622] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ba1bc27-42f1-4e06-9a4d-aa8d6ae7ccc1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.879682] env[63297]: INFO nova.compute.manager [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Terminating instance [ 1192.888416] env[63297]: DEBUG nova.compute.manager [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1192.888416] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1192.888416] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3263627-d95b-47d5-a953-a187175abc2d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.897842] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1192.897842] env[63297]: value = "task-1696842" [ 1192.897842] env[63297]: _type = "Task" [ 1192.897842] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.897842] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696841, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.910135] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1192.910945] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15c597b5-2009-4b05-857d-ce435498e01c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.918203] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696842, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.925935] env[63297]: DEBUG oslo_vmware.api [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Waiting for the task: (returnval){ [ 1192.925935] env[63297]: value = "task-1696843" [ 1192.925935] env[63297]: _type = "Task" [ 1192.925935] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.935440] env[63297]: DEBUG oslo_vmware.api [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Task: {'id': task-1696843, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.004108] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.066138] env[63297]: DEBUG nova.compute.manager [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1193.146023] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e9f24a-b181-44cf-b599-2d7233a0e696 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.156949] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c414b6c0-3bc6-4154-b9de-5f6af5da9ea0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.200644] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fb424f-5f59-4903-86dc-08334858c574 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.212716] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d2f9e3-96c7-441b-bdb4-99747d907d3c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.230332] env[63297]: DEBUG nova.compute.provider_tree [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.252274] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Skipping network cache update for instance because it is being deleted. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 1193.252644] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Skipping network cache update for instance because it is Building. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1193.252877] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Skipping network cache update for instance because it is Building. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1193.253113] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Skipping network cache update for instance because it is Building. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1193.253429] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Skipping network cache update for instance because it is Building. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1193.280738] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696839, 'name': ReconfigVM_Task, 'duration_secs': 0.577438} completed successfully. 
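The _heal_instance_info_cache records above show the periodic task rebuilding its candidate list and skipping instances that are being deleted or still building; only instances past those states get the forced network-info refresh seen a little further down for e5f198e8-2080-4e3e-8ad5-964b855d70ff. A rough sketch of that skip decision (attribute names are simplified assumptions, not the exact Nova fields):

    from collections import namedtuple

    Inst = namedtuple("Inst", "vm_state task_state")

    def should_refresh_cache(instance):
        if instance.task_state == "deleting":
            return False  # "Skipping ... because it is being deleted."
        if instance.vm_state == "building":
            return False  # "Skipping ... because it is Building."
        return True

    assert should_refresh_cache(Inst("active", None)) is True
    assert should_refresh_cache(Inst("building", None)) is False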
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.280846] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 8adfd26f-1012-4e52-9371-e9d3f654046c/8adfd26f-1012-4e52-9371-e9d3f654046c.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1193.281563] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ff01e34-8a33-4688-8d2a-c7f18724849f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.291405] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Waiting for the task: (returnval){ [ 1193.291405] env[63297]: value = "task-1696844" [ 1193.291405] env[63297]: _type = "Task" [ 1193.291405] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.304693] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696844, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.313209] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02c4072b-f0a1-4a87-99de-af256523e050 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "81920a24-f406-4923-98b7-cc0f3d0ccc8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.330s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.347281] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696841, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.278437} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.349889] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1193.351779] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0836f2a0-90f9-4d78-bac4-1bb28565aecb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.397189] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] ef851d71-788d-42f8-a824-5d30a89e957b/ef851d71-788d-42f8-a824-5d30a89e957b.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1193.397189] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6185eb46-39ff-481a-9f33-ac2a3e521728 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.416529] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "refresh_cache-e5f198e8-2080-4e3e-8ad5-964b855d70ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1193.416821] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquired lock "refresh_cache-e5f198e8-2080-4e3e-8ad5-964b855d70ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.417081] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Forcefully refreshing network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1193.417379] env[63297]: DEBUG nova.objects.instance [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lazy-loading 'info_cache' on Instance uuid e5f198e8-2080-4e3e-8ad5-964b855d70ff {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.436338] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1193.436338] env[63297]: value = "task-1696845" [ 1193.436338] env[63297]: _type = "Task" [ 1193.436338] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.436338] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696842, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.447291] env[63297]: DEBUG oslo_vmware.api [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Task: {'id': task-1696843, 'name': PowerOffVM_Task, 'duration_secs': 0.295797} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.447291] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1193.447291] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1193.447291] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0fe5146d-aab8-4fd4-a758-13de8c9f4f5c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.451268] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696845, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.596859] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.653052] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1193.653298] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1193.653496] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Deleting the datastore file [datastore1] 3d66ef2c-ac35-4eae-a205-6dd80ee564d1 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1193.653754] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-f1dedb6f-86ba-4fb5-9062-b93da6816088 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.665311] env[63297]: DEBUG oslo_vmware.api [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Waiting for the task: (returnval){ [ 1193.665311] env[63297]: value = "task-1696847" [ 1193.665311] env[63297]: _type = "Task" [ 1193.665311] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.673994] env[63297]: DEBUG oslo_vmware.api [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Task: {'id': task-1696847, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.737899] env[63297]: DEBUG nova.scheduler.client.report [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1193.802034] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696844, 'name': Rename_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.932551] env[63297]: DEBUG nova.network.neutron [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updated VIF entry in instance network info cache for port 14298cd9-8999-4142-9f1f-7a512e4a09d1. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1193.932963] env[63297]: DEBUG nova.network.neutron [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updating instance_info_cache with network_info: [{"id": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "address": "fa:16:3e:d1:15:40", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14298cd9-89", "ovs_interfaceid": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.947650] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696842, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.961524] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696845, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.012064] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "4438e230-0589-48ae-8848-d1f8414efa61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.012414] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "4438e230-0589-48ae-8848-d1f8414efa61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.059265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Acquiring lock "e5f198e8-2080-4e3e-8ad5-964b855d70ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.059265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Lock "e5f198e8-2080-4e3e-8ad5-964b855d70ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.059265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Acquiring lock "e5f198e8-2080-4e3e-8ad5-964b855d70ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.059265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Lock "e5f198e8-2080-4e3e-8ad5-964b855d70ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.059551] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Lock "e5f198e8-2080-4e3e-8ad5-964b855d70ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.063308] env[63297]: INFO nova.compute.manager [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: 
e5f198e8-2080-4e3e-8ad5-964b855d70ff] Terminating instance [ 1194.065846] env[63297]: DEBUG nova.compute.manager [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1194.066985] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1194.067418] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6216e38e-467b-4185-b905-13599abb257f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.079254] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1194.079446] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-abb4a92a-88be-4fdc-90f7-4e89eafa233b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.091266] env[63297]: DEBUG oslo_vmware.api [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Waiting for the task: (returnval){ [ 1194.091266] env[63297]: value = "task-1696848" [ 1194.091266] env[63297]: _type = "Task" [ 1194.091266] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.105463] env[63297]: DEBUG oslo_vmware.api [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696848, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.177952] env[63297]: DEBUG oslo_vmware.api [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Task: {'id': task-1696847, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.249034] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.506s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.249790] env[63297]: DEBUG nova.compute.manager [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1194.252508] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.738s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.253996] env[63297]: INFO nova.compute.claims [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1194.306501] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696844, 'name': Rename_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.432514] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696842, 'name': ReconfigVM_Task, 'duration_secs': 1.505515} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.432791] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Reconfigured VM instance instance-00000003 to attach disk [datastore1] cce038d4-dc9a-4fae-8348-1c2f674b79e3/cce038d4-dc9a-4fae-8348-1c2f674b79e3.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1194.433867] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae7add7a-47ce-49dc-b90e-87fbb5a58ff5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.437907] env[63297]: DEBUG oslo_concurrency.lockutils [req-f83a52a3-b1a1-41be-8a20-2c60672467ec req-60d7d672-b53e-4b76-8e5d-e7476a48bcc5 service nova] Releasing lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1194.445197] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1194.445197] env[63297]: value = "task-1696849" [ 1194.445197] env[63297]: _type = "Task" [ 1194.445197] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.458396] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696845, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.461922] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696849, 'name': Rename_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.521300] env[63297]: DEBUG nova.compute.manager [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1194.607765] env[63297]: DEBUG oslo_vmware.api [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696848, 'name': PowerOffVM_Task, 'duration_secs': 0.450555} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.607765] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1194.607879] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1194.608112] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2b8e2ed-71ab-4534-ab3b-9074f189acdb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.676716] env[63297]: DEBUG oslo_vmware.api [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Task: {'id': task-1696847, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.632822} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.677043] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1194.677235] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1194.677408] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1194.677788] env[63297]: INFO nova.compute.manager [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Took 1.79 seconds to destroy the instance on the hypervisor. [ 1194.677894] env[63297]: DEBUG oslo.service.loopingcall [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1194.678131] env[63297]: DEBUG nova.compute.manager [-] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1194.678131] env[63297]: DEBUG nova.network.neutron [-] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1194.707380] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1194.707380] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1194.707380] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Deleting the datastore file [datastore1] e5f198e8-2080-4e3e-8ad5-964b855d70ff {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1194.707380] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f51935b-931d-4d79-a048-c67812ec6698 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.715567] env[63297]: DEBUG oslo_vmware.api [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Waiting for the task: (returnval){ [ 1194.715567] env[63297]: value = "task-1696851" [ 1194.715567] env[63297]: _type = "Task" [ 1194.715567] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.724538] env[63297]: DEBUG oslo_vmware.api [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696851, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.758249] env[63297]: DEBUG nova.compute.utils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1194.764122] env[63297]: DEBUG nova.compute.manager [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1194.764122] env[63297]: DEBUG nova.network.neutron [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1194.803193] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696844, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.876244] env[63297]: DEBUG nova.policy [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c45e9386c62044d9ab921956cf4e6057', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15f506a4251d434aaf92405307f98a67', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1194.919512] env[63297]: DEBUG nova.network.neutron [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Successfully updated port: f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1194.961751] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696845, 'name': ReconfigVM_Task, 'duration_secs': 1.241236} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.965587] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Reconfigured VM instance instance-00000008 to attach disk [datastore1] ef851d71-788d-42f8-a824-5d30a89e957b/ef851d71-788d-42f8-a824-5d30a89e957b.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1194.966764] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696849, 'name': Rename_Task, 'duration_secs': 0.147083} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.966964] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78372bcc-81f3-440e-80cc-e2ae79cc2110 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.968706] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1194.968925] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-409e8743-df7a-43a3-831d-d096d73de4a8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.976668] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1194.976668] env[63297]: value = "task-1696853" [ 1194.976668] env[63297]: _type = "Task" [ 1194.976668] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.977860] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1194.977860] env[63297]: value = "task-1696852" [ 1194.977860] env[63297]: _type = "Task" [ 1194.977860] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.992348] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696853, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.996855] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696852, 'name': Rename_Task} progress is 10%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.050994] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.183063] env[63297]: DEBUG nova.compute.manager [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Received event network-vif-plugged-01a6609e-6d94-400f-8f01-fc67889fb600 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1195.183443] env[63297]: DEBUG oslo_concurrency.lockutils [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] Acquiring lock "961c3a87-7f53-4764-b8a4-40a408a30f90-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.184482] env[63297]: DEBUG oslo_concurrency.lockutils [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] Lock "961c3a87-7f53-4764-b8a4-40a408a30f90-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.184654] env[63297]: DEBUG oslo_concurrency.lockutils [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] Lock "961c3a87-7f53-4764-b8a4-40a408a30f90-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.184964] env[63297]: DEBUG nova.compute.manager [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] No waiting events found dispatching network-vif-plugged-01a6609e-6d94-400f-8f01-fc67889fb600 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1195.185166] env[63297]: WARNING nova.compute.manager [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Received unexpected event network-vif-plugged-01a6609e-6d94-400f-8f01-fc67889fb600 for instance with vm_state building and task_state spawning. [ 1195.185327] env[63297]: DEBUG nova.compute.manager [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Received event network-changed-01a6609e-6d94-400f-8f01-fc67889fb600 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1195.185528] env[63297]: DEBUG nova.compute.manager [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Refreshing instance network info cache due to event network-changed-01a6609e-6d94-400f-8f01-fc67889fb600. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1195.185674] env[63297]: DEBUG oslo_concurrency.lockutils [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] Acquiring lock "refresh_cache-961c3a87-7f53-4764-b8a4-40a408a30f90" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.186309] env[63297]: DEBUG oslo_concurrency.lockutils [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] Acquired lock "refresh_cache-961c3a87-7f53-4764-b8a4-40a408a30f90" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.189029] env[63297]: DEBUG nova.network.neutron [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Refreshing network info cache for port 01a6609e-6d94-400f-8f01-fc67889fb600 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1195.226716] env[63297]: DEBUG oslo_vmware.api [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Task: {'id': task-1696851, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134521} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.228038] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1195.228811] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1195.229041] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1195.229274] env[63297]: INFO nova.compute.manager [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1195.229524] env[63297]: DEBUG oslo.service.loopingcall [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1195.230267] env[63297]: DEBUG nova.compute.manager [-] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1195.230370] env[63297]: DEBUG nova.network.neutron [-] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1195.268398] env[63297]: DEBUG nova.compute.manager [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1195.306385] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696844, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.359871] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Updating instance_info_cache with network_info: [{"id": "76456429-f255-412c-910b-a21b9ee6408a", "address": "fa:16:3e:55:1f:b4", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76456429-f2", "ovs_interfaceid": "76456429-f255-412c-910b-a21b9ee6408a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.446329] env[63297]: DEBUG nova.network.neutron [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Successfully created port: ba02b0f2-d414-4714-b54a-10f89df1af3a {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1195.498194] env[63297]: DEBUG oslo_vmware.api [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696853, 'name': PowerOnVM_Task, 'duration_secs': 0.474624} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.501907] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1195.505123] env[63297]: DEBUG nova.compute.manager [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1195.505123] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696852, 'name': Rename_Task, 'duration_secs': 0.17971} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.507314] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d00897b-9996-4088-945d-ae7cb51d2b68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.512404] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1195.512404] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-460ad319-60a8-4900-8908-0027080a8472 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.524165] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1195.524165] env[63297]: value = "task-1696854" [ 1195.524165] env[63297]: _type = "Task" [ 1195.524165] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.537277] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696854, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.582687] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f8b203-0987-4de5-b9be-47b5d5f127a5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.594992] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44deb31e-d5e8-47f0-8a3d-79c20ab85661 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.634876] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c46f339-7a50-4e99-b18f-d0f17f1908c3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.645835] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70f1ad0-fee3-45b1-a4f3-29ed78ff590b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.669037] env[63297]: DEBUG nova.compute.provider_tree [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.714384] env[63297]: DEBUG nova.network.neutron [-] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.746179] env[63297]: DEBUG nova.network.neutron [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1195.803041] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696844, 'name': Rename_Task, 'duration_secs': 2.112488} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.803277] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1195.803534] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de163110-78bc-4396-a2de-7bb8e3312d7c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.810209] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Waiting for the task: (returnval){ [ 1195.810209] env[63297]: value = "task-1696855" [ 1195.810209] env[63297]: _type = "Task" [ 1195.810209] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.818500] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696855, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.863848] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Releasing lock "refresh_cache-e5f198e8-2080-4e3e-8ad5-964b855d70ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1195.864014] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Updated the network info_cache for instance {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1195.864151] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.899357] env[63297]: DEBUG nova.network.neutron [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.038562] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.046139] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696854, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.154411] env[63297]: DEBUG nova.network.neutron [-] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.173316] env[63297]: DEBUG nova.scheduler.client.report [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1196.218103] env[63297]: INFO nova.compute.manager [-] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Took 1.54 seconds to deallocate network for instance. [ 1196.289247] env[63297]: DEBUG nova.compute.manager [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1196.322764] env[63297]: DEBUG oslo_vmware.api [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696855, 'name': PowerOnVM_Task, 'duration_secs': 0.502645} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.325555] env[63297]: DEBUG nova.virt.hardware [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1196.325869] env[63297]: DEBUG nova.virt.hardware [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1196.325936] env[63297]: DEBUG nova.virt.hardware [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1196.326775] env[63297]: DEBUG nova.virt.hardware [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1196.327279] env[63297]: DEBUG nova.virt.hardware [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1196.327279] env[63297]: DEBUG nova.virt.hardware [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1196.327388] env[63297]: DEBUG nova.virt.hardware [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1196.327493] env[63297]: DEBUG nova.virt.hardware [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1196.327651] env[63297]: DEBUG nova.virt.hardware [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 
tempest-ServersTestJSON-518956339-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1196.328407] env[63297]: DEBUG nova.virt.hardware [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1196.328407] env[63297]: DEBUG nova.virt.hardware [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1196.328407] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1196.328588] env[63297]: INFO nova.compute.manager [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Took 13.58 seconds to spawn the instance on the hypervisor. [ 1196.329020] env[63297]: DEBUG nova.compute.manager [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1196.329870] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1f567e-ac82-4a1e-95ec-c85f45db33de {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.333811] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03891cdf-8e7e-4092-b4b1-e57d99c6ac0f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.345631] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83949d8-02ae-4c24-9305-778690d4b146 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.402743] env[63297]: DEBUG oslo_concurrency.lockutils [req-7247d1f2-e2b9-473d-a716-15a6129438e7 req-c4c43c22-7bac-4866-be7c-409f5c4b4d35 service nova] Releasing lock "refresh_cache-961c3a87-7f53-4764-b8a4-40a408a30f90" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.420421] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Acquiring lock "746742ac-8d7a-466b-8bc0-043cb5422111" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.420421] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lock "746742ac-8d7a-466b-8bc0-043cb5422111" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.541328] env[63297]: DEBUG oslo_vmware.api [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1696854, 'name': PowerOnVM_Task, 'duration_secs': 0.784904} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.541653] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1196.541868] env[63297]: INFO nova.compute.manager [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Took 10.53 seconds to spawn the instance on the hypervisor. [ 1196.542089] env[63297]: DEBUG nova.compute.manager [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1196.543658] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa478ee-3906-4ebb-95c8-9573de074ee0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.659328] env[63297]: INFO nova.compute.manager [-] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Took 1.43 seconds to deallocate network for instance. [ 1196.679731] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.680470] env[63297]: DEBUG nova.compute.manager [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1196.683881] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.650s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.685514] env[63297]: INFO nova.compute.claims [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1196.727029] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.759716] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "eff06e8a-8341-4d5e-b6dd-a585be4a21ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.760014] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "eff06e8a-8341-4d5e-b6dd-a585be4a21ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.760980] env[63297]: DEBUG nova.compute.manager [None req-b6cd0865-1949-404c-882f-6e03eff18c9c tempest-ServerDiagnosticsV248Test-1977235905 tempest-ServerDiagnosticsV248Test-1977235905-project-admin] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1196.762142] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928f41f3-8fa7-4a80-baa6-9281c8fa93fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.769679] env[63297]: INFO nova.compute.manager [None req-b6cd0865-1949-404c-882f-6e03eff18c9c tempest-ServerDiagnosticsV248Test-1977235905 tempest-ServerDiagnosticsV248Test-1977235905-project-admin] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Retrieving diagnostics [ 1196.771465] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b27662-aea3-48d7-9e86-6f5b041df4e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.859556] env[63297]: INFO nova.compute.manager [None req-f5612f55-e2d8-42f6-9250-7868392bc3de 
tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Took 23.18 seconds to build instance. [ 1197.065767] env[63297]: INFO nova.compute.manager [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Took 22.94 seconds to build instance. [ 1197.166325] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.184947] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Acquiring lock "459d5a17-182b-4284-b464-57d342981031" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.185205] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Lock "459d5a17-182b-4284-b464-57d342981031" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.193462] env[63297]: DEBUG nova.compute.utils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1197.198306] env[63297]: DEBUG nova.compute.manager [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1197.198306] env[63297]: DEBUG nova.network.neutron [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1197.290827] env[63297]: DEBUG nova.compute.manager [req-1b61f8ee-255f-4340-bba9-022b8b1e91c9 req-763feafe-ef2f-4019-bf23-79beef265579 service nova] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Received event network-vif-deleted-de9a987f-1f14-4610-a5aa-6271a1dd49c1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1197.296854] env[63297]: DEBUG nova.policy [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc041fcfaf8543ed829cb62fffa3b883', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa1a1e0788594eb292e3fce25ed37bd2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1197.362723] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f5612f55-e2d8-42f6-9250-7868392bc3de tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Lock "8adfd26f-1012-4e52-9371-e9d3f654046c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.692s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.410759] env[63297]: DEBUG nova.network.neutron [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Successfully updated port: ba02b0f2-d414-4714-b54a-10f89df1af3a {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1197.430748] env[63297]: DEBUG nova.network.neutron [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Successfully updated port: 881aea0b-28e5-4b91-af8d-d9c7c69b6446 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1197.491899] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Acquiring lock "754e64ec-b6fa-49d8-9de6-ef38918378fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.492357] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Lock "754e64ec-b6fa-49d8-9de6-ef38918378fd" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.570274] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dd6df5b2-6343-45b2-94d1-4a6ee43e3056 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "ef851d71-788d-42f8-a824-5d30a89e957b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.455s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.697608] env[63297]: DEBUG nova.compute.manager [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1197.868681] env[63297]: DEBUG nova.compute.manager [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1197.917031] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Acquiring lock "refresh_cache-ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.917212] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Acquired lock "refresh_cache-ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.917362] env[63297]: DEBUG nova.network.neutron [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1197.934508] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "refresh_cache-961c3a87-7f53-4764-b8a4-40a408a30f90" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.934644] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquired lock "refresh_cache-961c3a87-7f53-4764-b8a4-40a408a30f90" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.934795] env[63297]: DEBUG nova.network.neutron [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Building network info cache for instance 
{{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1197.982303] env[63297]: DEBUG nova.network.neutron [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Successfully created port: 3db68b1a-d911-4324-b993-dc755277e56b {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1198.021019] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d3ea1f-55cd-4b82-b14b-a75fc61750b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.027885] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d342e279-c308-44d1-82fe-d09c433e43f6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.062648] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6546bb-d0f9-4696-853e-8037880be38b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.071131] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac3d4ec-1215-4a90-885a-b25a8feb7f62 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.075348] env[63297]: DEBUG nova.compute.manager [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1198.087597] env[63297]: DEBUG nova.compute.provider_tree [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1198.398455] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.412952] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "b14e8466-68ab-4705-a439-6db961a149b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.413375] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "b14e8466-68ab-4705-a439-6db961a149b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.502309] env[63297]: DEBUG nova.network.neutron [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1198.519254] env[63297]: DEBUG nova.network.neutron [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1198.592732] env[63297]: DEBUG nova.scheduler.client.report [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1198.604661] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.709244] env[63297]: DEBUG nova.compute.manager [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1198.743765] env[63297]: DEBUG nova.virt.hardware [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1198.744929] env[63297]: DEBUG nova.virt.hardware [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1198.744929] env[63297]: DEBUG nova.virt.hardware [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1198.744929] env[63297]: DEBUG nova.virt.hardware [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor pref 0:0:0 {{(pid=63297) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1198.745106] env[63297]: DEBUG nova.virt.hardware [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1198.745372] env[63297]: DEBUG nova.virt.hardware [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1198.745733] env[63297]: DEBUG nova.virt.hardware [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1198.746103] env[63297]: DEBUG nova.virt.hardware [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1198.746410] env[63297]: DEBUG nova.virt.hardware [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1198.747020] env[63297]: DEBUG nova.virt.hardware [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1198.747566] env[63297]: DEBUG nova.virt.hardware [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1198.748856] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a192ec-c524-4e3c-a574-adaf5cf1af70 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.760538] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d750d34-ccef-4cd8-82d6-3dfd4ce9b9c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.826124] env[63297]: DEBUG nova.network.neutron [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Updating instance_info_cache with network_info: [{"id": "ba02b0f2-d414-4714-b54a-10f89df1af3a", "address": "fa:16:3e:4d:c6:d2", "network": {"id": "8be91b0b-92c2-4755-a194-c3063691c530", "bridge": "br-int", "label": "tempest-ServersTestJSON-2064421146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": 
[], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f506a4251d434aaf92405307f98a67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba02b0f2-d4", "ovs_interfaceid": "ba02b0f2-d414-4714-b54a-10f89df1af3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.058894] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Acquiring lock "8adfd26f-1012-4e52-9371-e9d3f654046c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.059288] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Lock "8adfd26f-1012-4e52-9371-e9d3f654046c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.059587] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Acquiring lock "8adfd26f-1012-4e52-9371-e9d3f654046c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.060927] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Lock "8adfd26f-1012-4e52-9371-e9d3f654046c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.061180] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Lock "8adfd26f-1012-4e52-9371-e9d3f654046c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.066069] env[63297]: DEBUG nova.compute.manager [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Received event 
network-vif-plugged-f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1199.067260] env[63297]: DEBUG oslo_concurrency.lockutils [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] Acquiring lock "961c3a87-7f53-4764-b8a4-40a408a30f90-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.067511] env[63297]: DEBUG oslo_concurrency.lockutils [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] Lock "961c3a87-7f53-4764-b8a4-40a408a30f90-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.067688] env[63297]: DEBUG oslo_concurrency.lockutils [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] Lock "961c3a87-7f53-4764-b8a4-40a408a30f90-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.067874] env[63297]: DEBUG nova.compute.manager [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] No waiting events found dispatching network-vif-plugged-f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1199.068061] env[63297]: WARNING nova.compute.manager [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Received unexpected event network-vif-plugged-f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6 for instance with vm_state building and task_state spawning. [ 1199.068230] env[63297]: DEBUG nova.compute.manager [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Received event network-changed-f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1199.068400] env[63297]: DEBUG nova.compute.manager [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Refreshing instance network info cache due to event network-changed-f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1199.068538] env[63297]: DEBUG oslo_concurrency.lockutils [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] Acquiring lock "refresh_cache-961c3a87-7f53-4764-b8a4-40a408a30f90" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1199.069071] env[63297]: INFO nova.compute.manager [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Terminating instance [ 1199.073384] env[63297]: DEBUG nova.compute.manager [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1199.073577] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1199.074965] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce36c1f3-6c54-4e13-9527-253e2e86fc9e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.085404] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1199.085931] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ac4cc7a-7171-4400-a198-c7ae20da0f99 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.096133] env[63297]: DEBUG oslo_vmware.api [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Waiting for the task: (returnval){ [ 1199.096133] env[63297]: value = "task-1696856" [ 1199.096133] env[63297]: _type = "Task" [ 1199.096133] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.102839] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.417s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.102839] env[63297]: DEBUG nova.compute.manager [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1199.106730] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.996s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.107769] env[63297]: INFO nova.compute.claims [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1199.116925] env[63297]: DEBUG oslo_vmware.api [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696856, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.236537] env[63297]: DEBUG nova.network.neutron [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Updating instance_info_cache with network_info: [{"id": "01a6609e-6d94-400f-8f01-fc67889fb600", "address": "fa:16:3e:99:b9:3e", "network": {"id": "ce82ab69-5179-43bb-b12a-72dfa2219f9c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-242865171", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01a6609e-6d", "ovs_interfaceid": "01a6609e-6d94-400f-8f01-fc67889fb600", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6", "address": "fa:16:3e:f0:25:de", "network": {"id": "29ec6440-32c1-4238-a3e1-836811433d63", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1995634363", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6b65dc1-9b", "ovs_interfaceid": "f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "881aea0b-28e5-4b91-af8d-d9c7c69b6446", "address": "fa:16:3e:e3:c7:17", "network": {"id": "ce82ab69-5179-43bb-b12a-72dfa2219f9c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-242865171", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", 
"external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap881aea0b-28", "ovs_interfaceid": "881aea0b-28e5-4b91-af8d-d9c7c69b6446", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.328932] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Releasing lock "refresh_cache-ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.329327] env[63297]: DEBUG nova.compute.manager [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Instance network_info: |[{"id": "ba02b0f2-d414-4714-b54a-10f89df1af3a", "address": "fa:16:3e:4d:c6:d2", "network": {"id": "8be91b0b-92c2-4755-a194-c3063691c530", "bridge": "br-int", "label": "tempest-ServersTestJSON-2064421146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f506a4251d434aaf92405307f98a67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba02b0f2-d4", "ovs_interfaceid": "ba02b0f2-d414-4714-b54a-10f89df1af3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1199.329863] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:c6:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13b62154-a0e1-4eed-bc30-6464b15993bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba02b0f2-d414-4714-b54a-10f89df1af3a', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1199.337883] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Creating folder: Project (15f506a4251d434aaf92405307f98a67). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1199.338013] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd5b8d1b-f1a4-4314-affc-91a4d63816f9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.353942] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Created folder: Project (15f506a4251d434aaf92405307f98a67) in parent group-v353718. [ 1199.354404] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Creating folder: Instances. Parent ref: group-v353744. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1199.354948] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-702b1073-a138-4ba9-8374-278ca47251a3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.367229] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Created folder: Instances in parent group-v353744. [ 1199.367511] env[63297]: DEBUG oslo.service.loopingcall [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1199.367710] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1199.367916] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ecde49c-8f4f-4690-a61c-34361283c9e0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.390406] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1199.390406] env[63297]: value = "task-1696859" [ 1199.390406] env[63297]: _type = "Task" [ 1199.390406] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.400469] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696859, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.609063] env[63297]: DEBUG nova.compute.utils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1199.610736] env[63297]: DEBUG oslo_vmware.api [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696856, 'name': PowerOffVM_Task, 'duration_secs': 0.22669} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.611526] env[63297]: DEBUG nova.compute.manager [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1199.611810] env[63297]: DEBUG nova.network.neutron [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1199.616745] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1199.616924] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1199.617748] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-539dd38f-e746-42a7-af16-0e4969fab73b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.686773] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "f3a579de-1f29-4b67-8dc8-07ea37267001" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.687030] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "f3a579de-1f29-4b67-8dc8-07ea37267001" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.706136] env[63297]: DEBUG nova.policy [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83ddfb07a6d648be89aff489836cff7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '734a95312d7d4da38201457d4f542a9e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} 
{{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1199.714337] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1199.714622] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1199.714874] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Deleting the datastore file [datastore1] 8adfd26f-1012-4e52-9371-e9d3f654046c {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1199.715210] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dff2f540-5d83-42ef-b095-d09e388ed86f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.722665] env[63297]: DEBUG oslo_vmware.api [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Waiting for the task: (returnval){ [ 1199.722665] env[63297]: value = "task-1696861" [ 1199.722665] env[63297]: _type = "Task" [ 1199.722665] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.732544] env[63297]: DEBUG oslo_vmware.api [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696861, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.742503] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Releasing lock "refresh_cache-961c3a87-7f53-4764-b8a4-40a408a30f90" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.743304] env[63297]: DEBUG nova.compute.manager [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Instance network_info: |[{"id": "01a6609e-6d94-400f-8f01-fc67889fb600", "address": "fa:16:3e:99:b9:3e", "network": {"id": "ce82ab69-5179-43bb-b12a-72dfa2219f9c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-242865171", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01a6609e-6d", "ovs_interfaceid": "01a6609e-6d94-400f-8f01-fc67889fb600", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6", "address": "fa:16:3e:f0:25:de", "network": {"id": "29ec6440-32c1-4238-a3e1-836811433d63", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1995634363", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6b65dc1-9b", "ovs_interfaceid": "f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "881aea0b-28e5-4b91-af8d-d9c7c69b6446", "address": "fa:16:3e:e3:c7:17", "network": {"id": "ce82ab69-5179-43bb-b12a-72dfa2219f9c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-242865171", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap881aea0b-28", "ovs_interfaceid": "881aea0b-28e5-4b91-af8d-d9c7c69b6446", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1199.743304] env[63297]: DEBUG oslo_concurrency.lockutils [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] Acquired lock "refresh_cache-961c3a87-7f53-4764-b8a4-40a408a30f90" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.743304] env[63297]: DEBUG nova.network.neutron [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Refreshing network info cache for port f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1199.744624] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:b9:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01a6609e-6d94-400f-8f01-fc67889fb600', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:25:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f65996a3-f865-4492-9377-cd14ec8b3aae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:c7:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '881aea0b-28e5-4b91-af8d-d9c7c69b6446', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1199.758686] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Creating folder: Project (babfd205ed454924b0bceb1d03fcfdf2). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1199.768383] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db568d0d-f3ee-47c8-af66-c0f859f8fae5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.780078] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Created folder: Project (babfd205ed454924b0bceb1d03fcfdf2) in parent group-v353718. [ 1199.780284] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Creating folder: Instances. Parent ref: group-v353747. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1199.780528] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf90b3df-5f8b-425b-9152-f2d4494f2ef0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.790533] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Created folder: Instances in parent group-v353747. [ 1199.790533] env[63297]: DEBUG oslo.service.loopingcall [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1199.790702] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1199.790908] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50a6412b-df92-4216-8c5c-589f1cbc38b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.816145] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1199.816145] env[63297]: value = "task-1696864" [ 1199.816145] env[63297]: _type = "Task" [ 1199.816145] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.827259] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696864, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.855581] env[63297]: DEBUG oslo_concurrency.lockutils [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Acquiring lock "7f8849fb-c5d6-47a1-8079-08dfb2e0b85a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.855884] env[63297]: DEBUG oslo_concurrency.lockutils [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Lock "7f8849fb-c5d6-47a1-8079-08dfb2e0b85a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.856135] env[63297]: DEBUG oslo_concurrency.lockutils [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Acquiring lock "7f8849fb-c5d6-47a1-8079-08dfb2e0b85a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.856387] env[63297]: DEBUG oslo_concurrency.lockutils [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Lock "7f8849fb-c5d6-47a1-8079-08dfb2e0b85a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.856693] env[63297]: DEBUG oslo_concurrency.lockutils [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Lock "7f8849fb-c5d6-47a1-8079-08dfb2e0b85a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.859317] env[63297]: INFO nova.compute.manager [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Terminating instance [ 1199.861382] env[63297]: DEBUG oslo_concurrency.lockutils [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Acquiring lock "refresh_cache-7f8849fb-c5d6-47a1-8079-08dfb2e0b85a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1199.861550] env[63297]: DEBUG oslo_concurrency.lockutils [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Acquired lock "refresh_cache-7f8849fb-c5d6-47a1-8079-08dfb2e0b85a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.861729] env[63297]: DEBUG nova.network.neutron [None 
req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1199.902861] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696859, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.922849] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquiring lock "cce038d4-dc9a-4fae-8348-1c2f674b79e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.923184] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lock "cce038d4-dc9a-4fae-8348-1c2f674b79e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.923456] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquiring lock "cce038d4-dc9a-4fae-8348-1c2f674b79e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.923648] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lock "cce038d4-dc9a-4fae-8348-1c2f674b79e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.923841] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lock "cce038d4-dc9a-4fae-8348-1c2f674b79e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.926770] env[63297]: INFO nova.compute.manager [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Terminating instance [ 1199.929027] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquiring lock "refresh_cache-cce038d4-dc9a-4fae-8348-1c2f674b79e3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1199.929222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9246ebf-3051-4e51-95d1-adec1730eafd 
tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquired lock "refresh_cache-cce038d4-dc9a-4fae-8348-1c2f674b79e3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.929340] env[63297]: DEBUG nova.network.neutron [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1200.121270] env[63297]: DEBUG nova.compute.manager [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1200.207918] env[63297]: DEBUG nova.network.neutron [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Updated VIF entry in instance network info cache for port f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1200.208434] env[63297]: DEBUG nova.network.neutron [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Updating instance_info_cache with network_info: [{"id": "01a6609e-6d94-400f-8f01-fc67889fb600", "address": "fa:16:3e:99:b9:3e", "network": {"id": "ce82ab69-5179-43bb-b12a-72dfa2219f9c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-242865171", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01a6609e-6d", "ovs_interfaceid": "01a6609e-6d94-400f-8f01-fc67889fb600", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6", "address": "fa:16:3e:f0:25:de", "network": {"id": "29ec6440-32c1-4238-a3e1-836811433d63", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1995634363", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6b65dc1-9b", "ovs_interfaceid": "f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "881aea0b-28e5-4b91-af8d-d9c7c69b6446", "address": "fa:16:3e:e3:c7:17", "network": {"id": "ce82ab69-5179-43bb-b12a-72dfa2219f9c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-242865171", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap881aea0b-28", "ovs_interfaceid": "881aea0b-28e5-4b91-af8d-d9c7c69b6446", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.236544] env[63297]: DEBUG oslo_vmware.api [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Task: {'id': task-1696861, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241192} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.241356] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1200.241356] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1200.241505] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1200.241817] env[63297]: INFO nova.compute.manager [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Took 1.17 seconds to destroy the instance on the hypervisor. 
[ 1200.241923] env[63297]: DEBUG oslo.service.loopingcall [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1200.242504] env[63297]: DEBUG nova.compute.manager [-] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1200.242603] env[63297]: DEBUG nova.network.neutron [-] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1200.301421] env[63297]: DEBUG nova.network.neutron [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Successfully created port: e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1200.329154] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696864, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.405522] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696859, 'name': CreateVM_Task, 'duration_secs': 0.518789} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.405522] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1200.405522] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.405522] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.405522] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1200.405803] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64bea2d4-c8f4-4503-aed5-aa871201a84a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.411717] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 
tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Waiting for the task: (returnval){ [ 1200.411717] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f69997-c929-59be-e74b-acf2db627b04" [ 1200.411717] env[63297]: _type = "Task" [ 1200.411717] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.419297] env[63297]: DEBUG nova.network.neutron [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1200.427723] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f69997-c929-59be-e74b-acf2db627b04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.481245] env[63297]: DEBUG nova.network.neutron [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1200.511765] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71537070-990b-4157-abfd-137fb83b4030 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.523493] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e29323-f24d-40d0-91e6-422769c0060e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.564212] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faedd04c-cc66-45c8-9590-ccf118ca54d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.568367] env[63297]: DEBUG nova.network.neutron [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Successfully updated port: 3db68b1a-d911-4324-b993-dc755277e56b {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1200.572357] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d803529-f48d-429f-8584-ea0703b1bb13 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.589152] env[63297]: DEBUG nova.compute.provider_tree [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1200.655756] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df 
tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "14b4518e-044a-451a-845d-fa3742e5b3e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.655756] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "14b4518e-044a-451a-845d-fa3742e5b3e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.667131] env[63297]: DEBUG nova.network.neutron [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.711446] env[63297]: DEBUG oslo_concurrency.lockutils [req-2cfef494-e7ff-4b88-abd1-cd4056c4f427 req-5f33448d-f061-4c80-88b9-46c1376d71fa service nova] Releasing lock "refresh_cache-961c3a87-7f53-4764-b8a4-40a408a30f90" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.739138] env[63297]: DEBUG nova.network.neutron [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.828075] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696864, 'name': CreateVM_Task, 'duration_secs': 0.638998} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.828280] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1200.829094] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.925735] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f69997-c929-59be-e74b-acf2db627b04, 'name': SearchDatastore_Task, 'duration_secs': 0.01725} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.926116] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.926528] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1200.926832] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.927048] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.927292] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1200.928019] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.928548] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1200.928835] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d23727aa-02ef-484e-aeaf-787b6cd95fc5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.931267] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a7acbf1-2f75-4855-b6bc-2d484694e5b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.937275] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 
tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1200.937275] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52332b3e-dd97-dbd9-36c7-2d0750ad103c" [ 1200.937275] env[63297]: _type = "Task" [ 1200.937275] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.943015] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1200.943249] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1200.944583] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70904aa1-64f9-41eb-97af-5feb94088fdc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.951166] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52332b3e-dd97-dbd9-36c7-2d0750ad103c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.954689] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Waiting for the task: (returnval){ [ 1200.954689] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e493e2-649e-7626-d29f-bdcfacbe6c8d" [ 1200.954689] env[63297]: _type = "Task" [ 1200.954689] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.964981] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e493e2-649e-7626-d29f-bdcfacbe6c8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.074443] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1201.074443] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.074443] env[63297]: DEBUG nova.network.neutron [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1201.093835] env[63297]: DEBUG nova.scheduler.client.report [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1201.134045] env[63297]: DEBUG nova.compute.manager [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1201.156490] env[63297]: DEBUG nova.virt.hardware [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:22:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='448567488',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-322478014',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1201.156490] env[63297]: DEBUG nova.virt.hardware [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1201.159164] env[63297]: DEBUG nova.virt.hardware [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1201.159164] env[63297]: DEBUG nova.virt.hardware [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1201.159164] env[63297]: DEBUG nova.virt.hardware [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1201.159164] env[63297]: DEBUG nova.virt.hardware [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1201.159164] env[63297]: DEBUG nova.virt.hardware [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1201.159164] env[63297]: DEBUG nova.virt.hardware [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 
tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1201.159164] env[63297]: DEBUG nova.virt.hardware [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1201.159164] env[63297]: DEBUG nova.virt.hardware [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1201.159164] env[63297]: DEBUG nova.virt.hardware [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1201.159534] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8db99d-41e7-4ff5-8e4e-6deadeab70c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.167864] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9430e9-5824-4172-851f-7ec795a3a02f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.172486] env[63297]: DEBUG oslo_concurrency.lockutils [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Releasing lock "refresh_cache-7f8849fb-c5d6-47a1-8079-08dfb2e0b85a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.172862] env[63297]: DEBUG nova.compute.manager [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1201.173064] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1201.174168] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee5f07a-50a2-40e4-9335-06caca228223 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.183235] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1201.190902] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-247e1749-e32e-4f2e-90db-42f53b2061eb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.197801] env[63297]: DEBUG oslo_vmware.api [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1201.197801] env[63297]: value = "task-1696865" [ 1201.197801] env[63297]: _type = "Task" [ 1201.197801] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.206762] env[63297]: DEBUG oslo_vmware.api [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696865, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.242699] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Releasing lock "refresh_cache-cce038d4-dc9a-4fae-8348-1c2f674b79e3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.242973] env[63297]: DEBUG nova.compute.manager [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1201.243217] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1201.244440] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22eec209-9f79-4b43-b71d-c83b53bc29dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.251621] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1201.252072] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79c94cf6-6855-4111-b91a-e84cc3af9bf6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.258898] env[63297]: DEBUG oslo_vmware.api [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1201.258898] env[63297]: value = "task-1696866" [ 1201.258898] env[63297]: _type = "Task" [ 1201.258898] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.270294] env[63297]: DEBUG oslo_vmware.api [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696866, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.448981] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52332b3e-dd97-dbd9-36c7-2d0750ad103c, 'name': SearchDatastore_Task, 'duration_secs': 0.020155} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.448981] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.448981] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1201.449200] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1201.463920] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e493e2-649e-7626-d29f-bdcfacbe6c8d, 'name': SearchDatastore_Task, 'duration_secs': 0.026308} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.468367] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50771a72-60e1-42dc-8721-9ed4060b70ef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.473314] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Waiting for the task: (returnval){ [ 1201.473314] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]521d1382-3b59-c9a2-baa7-981568b16258" [ 1201.473314] env[63297]: _type = "Task" [ 1201.473314] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.487083] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521d1382-3b59-c9a2-baa7-981568b16258, 'name': SearchDatastore_Task, 'duration_secs': 0.01027} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.487083] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.487083] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] ef57101e-1d8a-4ad5-ad68-cad2dbea33d1/ef57101e-1d8a-4ad5-ad68-cad2dbea33d1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1201.487083] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.487373] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1201.487683] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4c50a39-fad6-4171-9080-0b0a17065bf3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.492191] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21f15319-b11d-4e1b-9a4f-b89a3d5ec683 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.493539] env[63297]: DEBUG nova.network.neutron [-] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.499974] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Waiting for the task: (returnval){ [ 1201.499974] env[63297]: value = "task-1696867" [ 1201.499974] env[63297]: _type = "Task" [ 1201.499974] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.505838] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1201.507255] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1201.507867] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e255e337-67e6-488d-8b9d-94986a5de9d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.515332] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696867, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.518721] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1201.518721] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528a93eb-dd34-b07d-915a-f1edd072f618" [ 1201.518721] env[63297]: _type = "Task" [ 1201.518721] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.527175] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528a93eb-dd34-b07d-915a-f1edd072f618, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.599289] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.493s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.599632] env[63297]: DEBUG nova.compute.manager [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1201.602528] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.599s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.604046] env[63297]: INFO nova.compute.claims [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1201.620640] env[63297]: DEBUG nova.network.neutron [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1201.709999] env[63297]: DEBUG oslo_vmware.api [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696865, 'name': PowerOffVM_Task, 'duration_secs': 0.131309} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.710291] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.710453] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1201.710726] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef53ccb0-385f-4010-818c-270ceb4c348a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.738275] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.738668] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.739036] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 
tempest-ServerDiagnosticsV248Test-1481871939-project-member] Deleting the datastore file [datastore1] 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.739467] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b8aea39-3ec7-47a6-b7aa-e0c7b77d0e77 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.747405] env[63297]: DEBUG oslo_vmware.api [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for the task: (returnval){ [ 1201.747405] env[63297]: value = "task-1696869" [ 1201.747405] env[63297]: _type = "Task" [ 1201.747405] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.762105] env[63297]: DEBUG oslo_vmware.api [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696869, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.775626] env[63297]: DEBUG oslo_vmware.api [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696866, 'name': PowerOffVM_Task, 'duration_secs': 0.135379} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.775964] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.776072] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1201.776455] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63f84ee0-5147-4090-aec4-de15947f7f51 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.810414] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.810807] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.810937] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9246ebf-3051-4e51-95d1-adec1730eafd 
tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Deleting the datastore file [datastore1] cce038d4-dc9a-4fae-8348-1c2f674b79e3 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.811388] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba58a67a-8cd7-461f-9113-2f7467e9d561 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.821560] env[63297]: DEBUG nova.network.neutron [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance_info_cache with network_info: [{"id": "3db68b1a-d911-4324-b993-dc755277e56b", "address": "fa:16:3e:c3:7e:ea", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3db68b1a-d9", "ovs_interfaceid": "3db68b1a-d911-4324-b993-dc755277e56b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.827381] env[63297]: DEBUG oslo_vmware.api [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for the task: (returnval){ [ 1201.827381] env[63297]: value = "task-1696871" [ 1201.827381] env[63297]: _type = "Task" [ 1201.827381] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.835627] env[63297]: DEBUG oslo_vmware.api [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696871, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.845658] env[63297]: DEBUG nova.compute.manager [req-03b0229a-315d-4253-a186-18c128ea0fc6 req-ebb0bf29-17a3-4167-b9e3-135f5e8658b0 service nova] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Received event network-vif-deleted-76456429-f255-412c-910b-a21b9ee6408a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1201.997348] env[63297]: INFO nova.compute.manager [-] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Took 1.75 seconds to deallocate network for instance. 
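(Editor's note, not part of the captured log.) The entries above repeat one pattern: a vCenter task is started ("Invoking VirtualMachine.PowerOffVM_Task ...", "Invoking FileManager.DeleteDatastoreFile_Task ..."), the driver then logs "Waiting for the task: (returnval){ value = "task-..." }", and _poll_task reports progress until the task "completed successfully". The sketch below is a simplified, self-contained illustration of that poll-until-done loop; it is not the oslo.vmware implementation, and fetch_task_info, TaskInfo and TaskFailed are placeholder names invented here.

# Illustrative task-polling loop, assuming a hypothetical fetch_task_info()
# that reads the task's 'info' property from vCenter.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str        # "running", "success" or "error"
    progress: int     # percent complete, as reported by vCenter
    error: str = ""


class TaskFailed(Exception):
    pass


def fetch_task_info(task_ref):
    """Placeholder for a PropertyCollector read of the task object."""
    raise NotImplementedError


def wait_for_task(task_ref, poll_interval=0.5):
    """Poll a vCenter task until it finishes, mirroring the DEBUG lines above."""
    while True:
        info = fetch_task_info(task_ref)
        if info.state == "success":
            return info                      # "... completed successfully."
        if info.state == "error":
            raise TaskFailed(info.error)
        # e.g. "Task: {'id': task-1696866, 'name': PowerOffVM_Task} progress is 0%."
        print(f"Task {task_ref} progress is {info.progress}%.")
        time.sleep(poll_interval)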
[ 1202.012498] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696867, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.034759] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528a93eb-dd34-b07d-915a-f1edd072f618, 'name': SearchDatastore_Task, 'duration_secs': 0.008195} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.035586] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9288d776-2dcb-42af-a550-8da0fc59b005 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.040901] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1202.040901] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526c85cd-ef62-0d53-9393-f7a176cb99ef" [ 1202.040901] env[63297]: _type = "Task" [ 1202.040901] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.054991] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526c85cd-ef62-0d53-9393-f7a176cb99ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.110270] env[63297]: DEBUG nova.compute.utils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1202.112431] env[63297]: DEBUG nova.compute.manager [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Not allocating networking since 'none' was specified. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1202.259069] env[63297]: DEBUG oslo_vmware.api [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Task: {'id': task-1696869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.329039} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.259069] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.259440] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1202.259440] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1202.259440] env[63297]: INFO nova.compute.manager [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1202.259690] env[63297]: DEBUG oslo.service.loopingcall [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1202.259851] env[63297]: DEBUG nova.compute.manager [-] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1202.260584] env[63297]: DEBUG nova.network.neutron [-] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1202.290855] env[63297]: DEBUG nova.compute.manager [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Received event network-vif-plugged-ba02b0f2-d414-4714-b54a-10f89df1af3a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1202.295020] env[63297]: DEBUG oslo_concurrency.lockutils [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] Acquiring lock "ef57101e-1d8a-4ad5-ad68-cad2dbea33d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.295020] env[63297]: DEBUG oslo_concurrency.lockutils [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] Lock "ef57101e-1d8a-4ad5-ad68-cad2dbea33d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.295020] env[63297]: DEBUG oslo_concurrency.lockutils [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] Lock "ef57101e-1d8a-4ad5-ad68-cad2dbea33d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.295020] env[63297]: DEBUG nova.compute.manager [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] No waiting events found dispatching network-vif-plugged-ba02b0f2-d414-4714-b54a-10f89df1af3a {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1202.295020] env[63297]: WARNING nova.compute.manager [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Received unexpected event network-vif-plugged-ba02b0f2-d414-4714-b54a-10f89df1af3a for instance with vm_state building and task_state spawning. 
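(Editor's note, not part of the captured log.) The surrounding entries show Neutron-originated external events ("network-vif-plugged-...") being delivered to the compute manager: the per-instance "<uuid>-events" lock is taken, a matching waiter is looked up, and when none is registered the event is logged as "No waiting events found dispatching ..." / "Received unexpected event ...". The sketch below is a minimal stand-in for that register-then-dispatch pattern, under the assumption of a thread-per-request model; InstanceEvents, prepare_for_event and dispatch_event are illustrative names only, not Nova's actual API.

# Minimal sketch of an event registry: a spawning thread registers interest in
# an event, the event handler either wakes it up or reports the event as
# unexpected, as seen in the log above.
import threading
from collections import defaultdict


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()               # cf. the "<uuid>-events" lock
        self._waiters = defaultdict(dict)           # instance uuid -> {event name: Event}

    def prepare_for_event(self, instance_uuid, event_name):
        """Register interest before triggering the work that emits the event."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def dispatch_event(self, instance_uuid, event_name):
        """Handle an external event arriving from the network service."""
        with self._lock:
            waiter = self._waiters[instance_uuid].pop(event_name, None)
        if waiter is None:
            # "No waiting events found dispatching ..." / "Received unexpected event ..."
            print(f"unexpected event {event_name} for instance {instance_uuid}")
            return
        waiter.set()                                # unblock the waiting thread


# usage sketch: a spawn thread would do
#   waiter = events.prepare_for_event(uuid, "network-vif-plugged-<port-id>")
#   ... ask the network service to bind the port ...
#   waiter.wait(timeout=300)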
[ 1202.295020] env[63297]: DEBUG nova.compute.manager [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Received event network-vif-plugged-881aea0b-28e5-4b91-af8d-d9c7c69b6446 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1202.295020] env[63297]: DEBUG oslo_concurrency.lockutils [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] Acquiring lock "961c3a87-7f53-4764-b8a4-40a408a30f90-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.295020] env[63297]: DEBUG oslo_concurrency.lockutils [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] Lock "961c3a87-7f53-4764-b8a4-40a408a30f90-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.295020] env[63297]: DEBUG oslo_concurrency.lockutils [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] Lock "961c3a87-7f53-4764-b8a4-40a408a30f90-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.295020] env[63297]: DEBUG nova.compute.manager [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] No waiting events found dispatching network-vif-plugged-881aea0b-28e5-4b91-af8d-d9c7c69b6446 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1202.295020] env[63297]: WARNING nova.compute.manager [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Received unexpected event network-vif-plugged-881aea0b-28e5-4b91-af8d-d9c7c69b6446 for instance with vm_state building and task_state spawning. [ 1202.295020] env[63297]: DEBUG nova.compute.manager [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Received event network-changed-ba02b0f2-d414-4714-b54a-10f89df1af3a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1202.295020] env[63297]: DEBUG nova.compute.manager [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Refreshing instance network info cache due to event network-changed-ba02b0f2-d414-4714-b54a-10f89df1af3a. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1202.295020] env[63297]: DEBUG oslo_concurrency.lockutils [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] Acquiring lock "refresh_cache-ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1202.295020] env[63297]: DEBUG oslo_concurrency.lockutils [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] Acquired lock "refresh_cache-ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.296252] env[63297]: DEBUG nova.network.neutron [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Refreshing network info cache for port ba02b0f2-d414-4714-b54a-10f89df1af3a {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1202.296252] env[63297]: DEBUG nova.network.neutron [-] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1202.323834] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.324211] env[63297]: DEBUG nova.compute.manager [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Instance network_info: |[{"id": "3db68b1a-d911-4324-b993-dc755277e56b", "address": "fa:16:3e:c3:7e:ea", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3db68b1a-d9", "ovs_interfaceid": "3db68b1a-d911-4324-b993-dc755277e56b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1202.324748] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:7e:ea', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3db68b1a-d911-4324-b993-dc755277e56b', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1202.333886] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Creating folder: Project (fa1a1e0788594eb292e3fce25ed37bd2). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1202.335078] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54cb063c-3e4a-42cb-80fa-4d54203fad2b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.345948] env[63297]: DEBUG oslo_vmware.api [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Task: {'id': task-1696871, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252389} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.346213] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.346391] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1202.346562] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1202.346733] env[63297]: INFO nova.compute.manager [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1202.346992] env[63297]: DEBUG oslo.service.loopingcall [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1202.348265] env[63297]: DEBUG nova.compute.manager [-] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1202.348370] env[63297]: DEBUG nova.network.neutron [-] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1202.349958] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Created folder: Project (fa1a1e0788594eb292e3fce25ed37bd2) in parent group-v353718. [ 1202.350148] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Creating folder: Instances. Parent ref: group-v353750. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1202.350376] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42f977ce-bdc2-498a-846e-fb7b1bd44c76 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.359085] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Created folder: Instances in parent group-v353750. [ 1202.359310] env[63297]: DEBUG oslo.service.loopingcall [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1202.359496] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1202.359697] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2cb2c52-ec71-49ee-afa1-4191d338fe58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.374613] env[63297]: DEBUG nova.network.neutron [-] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1202.382802] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1202.382802] env[63297]: value = "task-1696874" [ 1202.382802] env[63297]: _type = "Task" [ 1202.382802] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.394169] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696874, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.512237] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.521761] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696867, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522521} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.521761] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] ef57101e-1d8a-4ad5-ad68-cad2dbea33d1/ef57101e-1d8a-4ad5-ad68-cad2dbea33d1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1202.521948] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1202.522243] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a395b6e-59d1-41ba-8bf3-9d1334a01511 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.530463] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Waiting for the task: (returnval){ [ 1202.530463] env[63297]: value = "task-1696875" [ 1202.530463] env[63297]: _type = "Task" [ 1202.530463] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.544927] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696875, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.555824] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526c85cd-ef62-0d53-9393-f7a176cb99ef, 'name': SearchDatastore_Task, 'duration_secs': 0.011016} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.556157] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.556437] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 961c3a87-7f53-4764-b8a4-40a408a30f90/961c3a87-7f53-4764-b8a4-40a408a30f90.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1202.557042] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-410854b3-6676-4b46-a8aa-644c3f71d1c7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.566922] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1202.566922] env[63297]: value = "task-1696876" [ 1202.566922] env[63297]: _type = "Task" [ 1202.566922] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.576436] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696876, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.615384] env[63297]: DEBUG nova.compute.manager [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1202.673170] env[63297]: DEBUG nova.network.neutron [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Successfully updated port: e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1202.801942] env[63297]: DEBUG nova.network.neutron [-] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.878856] env[63297]: DEBUG nova.network.neutron [-] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.898256] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696874, 'name': CreateVM_Task, 'duration_secs': 0.420151} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.907211] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1202.907211] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1202.907211] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.907211] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1202.907211] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0563551-883f-455d-846f-979d52cd8bc4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.914871] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1202.914871] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5221e964-842a-00b7-a6c4-b465317fa65e" [ 1202.914871] env[63297]: _type = "Task" [ 1202.914871] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.926266] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5221e964-842a-00b7-a6c4-b465317fa65e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.981448] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dadbacd-5609-4e86-8645-f7ae759cdd67 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.989843] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642015b0-46e1-42b2-8d7b-92f548cce727 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.028797] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57405f2d-a7e3-4860-a452-c3b38a49e314 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.040301] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696875, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070263} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.044545] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1203.044545] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f58253-df22-4103-ac5c-2fc8f76b3608 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.047378] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e839728e-9667-43f7-a346-211b0064dcce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.071028] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] ef57101e-1d8a-4ad5-ad68-cad2dbea33d1/ef57101e-1d8a-4ad5-ad68-cad2dbea33d1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1203.083656] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e9dda07-6819-468f-b303-2b4c4bd97c27 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.098780] env[63297]: DEBUG nova.compute.provider_tree [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 
tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1203.108803] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696876, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.110897] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Waiting for the task: (returnval){ [ 1203.110897] env[63297]: value = "task-1696877" [ 1203.110897] env[63297]: _type = "Task" [ 1203.110897] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.120902] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696877, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.176275] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "refresh_cache-b5d34058-fa3e-4806-97e5-638bbbffaeb8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.176436] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquired lock "refresh_cache-b5d34058-fa3e-4806-97e5-638bbbffaeb8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.176548] env[63297]: DEBUG nova.network.neutron [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1203.207612] env[63297]: DEBUG nova.network.neutron [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Updated VIF entry in instance network info cache for port ba02b0f2-d414-4714-b54a-10f89df1af3a. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1203.208061] env[63297]: DEBUG nova.network.neutron [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Updating instance_info_cache with network_info: [{"id": "ba02b0f2-d414-4714-b54a-10f89df1af3a", "address": "fa:16:3e:4d:c6:d2", "network": {"id": "8be91b0b-92c2-4755-a194-c3063691c530", "bridge": "br-int", "label": "tempest-ServersTestJSON-2064421146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f506a4251d434aaf92405307f98a67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba02b0f2-d4", "ovs_interfaceid": "ba02b0f2-d414-4714-b54a-10f89df1af3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.305714] env[63297]: INFO nova.compute.manager [-] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Took 1.05 seconds to deallocate network for instance. [ 1203.383120] env[63297]: INFO nova.compute.manager [-] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Took 1.03 seconds to deallocate network for instance. [ 1203.426906] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5221e964-842a-00b7-a6c4-b465317fa65e, 'name': SearchDatastore_Task, 'duration_secs': 0.02758} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.428118] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1203.428494] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1203.428980] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.429364] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.429865] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1203.430505] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49bc6bec-97d5-478c-97bb-5dab6249305c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.440213] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1203.440891] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1203.442029] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e6a1f0d-3a43-485a-af13-3c7b750f95eb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.452227] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1203.452227] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526db83c-2809-e167-dc57-4dbe236ccc49" [ 1203.452227] env[63297]: _type = "Task" [ 1203.452227] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.460442] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526db83c-2809-e167-dc57-4dbe236ccc49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.596574] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696876, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.877011} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.596872] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 961c3a87-7f53-4764-b8a4-40a408a30f90/961c3a87-7f53-4764-b8a4-40a408a30f90.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1203.597094] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1203.598155] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c73638f0-44ec-4dab-9716-67b4aa0e9385 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.605579] env[63297]: DEBUG nova.scheduler.client.report [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1203.608442] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1203.608442] env[63297]: value = "task-1696878" [ 1203.608442] env[63297]: _type = "Task" [ 1203.608442] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.625033] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696878, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.628616] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696877, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.630911] env[63297]: DEBUG nova.compute.manager [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1203.669723] env[63297]: DEBUG nova.virt.hardware [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1203.670104] env[63297]: DEBUG nova.virt.hardware [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1203.670315] env[63297]: DEBUG nova.virt.hardware [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1203.670572] env[63297]: DEBUG nova.virt.hardware [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Flavor 
pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1203.670678] env[63297]: DEBUG nova.virt.hardware [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1203.670923] env[63297]: DEBUG nova.virt.hardware [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1203.671092] env[63297]: DEBUG nova.virt.hardware [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1203.671262] env[63297]: DEBUG nova.virt.hardware [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1203.671368] env[63297]: DEBUG nova.virt.hardware [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1203.671519] env[63297]: DEBUG nova.virt.hardware [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1203.671724] env[63297]: DEBUG nova.virt.hardware [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1203.672963] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8a4d3e-a38d-4bcc-a90a-397293b27223 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.683909] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd4a499-bad3-409c-a65e-8a58b1cd7283 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.699942] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Instance VIF info [] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1203.706301] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 
tempest-ServersAdmin275Test-1607202124-project-member] Creating folder: Project (5b445ed56acb4f5a8a30d5b0e81fabd5). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1203.707984] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a8dd05d-8de1-4429-99ae-cfabc3ed2166 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.714844] env[63297]: DEBUG oslo_concurrency.lockutils [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] Releasing lock "refresh_cache-ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1203.715158] env[63297]: DEBUG nova.compute.manager [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Received event network-changed-881aea0b-28e5-4b91-af8d-d9c7c69b6446 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1203.715336] env[63297]: DEBUG nova.compute.manager [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Refreshing instance network info cache due to event network-changed-881aea0b-28e5-4b91-af8d-d9c7c69b6446. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1203.715632] env[63297]: DEBUG oslo_concurrency.lockutils [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] Acquiring lock "refresh_cache-961c3a87-7f53-4764-b8a4-40a408a30f90" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.715680] env[63297]: DEBUG oslo_concurrency.lockutils [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] Acquired lock "refresh_cache-961c3a87-7f53-4764-b8a4-40a408a30f90" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.715960] env[63297]: DEBUG nova.network.neutron [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Refreshing network info cache for port 881aea0b-28e5-4b91-af8d-d9c7c69b6446 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1203.717275] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Acquiring lock "86a0579f-211c-42bc-925a-e30aaca4e0f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.717486] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Lock "86a0579f-211c-42bc-925a-e30aaca4e0f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.721663] env[63297]: DEBUG 
nova.network.neutron [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1203.725970] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Created folder: Project (5b445ed56acb4f5a8a30d5b0e81fabd5) in parent group-v353718. [ 1203.726168] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Creating folder: Instances. Parent ref: group-v353753. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1203.726917] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-73e857f0-8735-4445-a170-4cf610b8b0b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.737694] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Created folder: Instances in parent group-v353753. [ 1203.738018] env[63297]: DEBUG oslo.service.loopingcall [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1203.738144] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1203.739094] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f02639d9-154e-453d-ab4f-323b23f41cdd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.758357] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1203.758357] env[63297]: value = "task-1696881" [ 1203.758357] env[63297]: _type = "Task" [ 1203.758357] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.767133] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696881, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.814598] env[63297]: DEBUG oslo_concurrency.lockutils [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.895532] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.895532] env[63297]: DEBUG nova.network.neutron [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Updating instance_info_cache with network_info: [{"id": "e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c", "address": "fa:16:3e:ff:06:03", "network": {"id": "bfd15893-9bb1-46a3-bf31-db474ed0269a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1591634149-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734a95312d7d4da38201457d4f542a9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1fefdf1-1c", "ovs_interfaceid": "e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.961289] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526db83c-2809-e167-dc57-4dbe236ccc49, 'name': SearchDatastore_Task, 'duration_secs': 0.023148} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.962764] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-782c4de3-bf6b-4e20-9903-37c8a63be252 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.968913] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1203.968913] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520744a2-98e6-f5c0-901f-a8003f2b8caf" [ 1203.968913] env[63297]: _type = "Task" [ 1203.968913] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.978128] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520744a2-98e6-f5c0-901f-a8003f2b8caf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.110969] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.508s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.110969] env[63297]: DEBUG nova.compute.manager [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1204.113852] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.517s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.115445] env[63297]: INFO nova.compute.claims [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1204.131360] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696878, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073526} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.134897] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1204.135191] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696877, 'name': ReconfigVM_Task, 'duration_secs': 0.643935} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.136207] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1f0167-6592-423e-bccf-5d9cca3bc547 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.138844] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Reconfigured VM instance instance-0000000a to attach disk [datastore1] ef57101e-1d8a-4ad5-ad68-cad2dbea33d1/ef57101e-1d8a-4ad5-ad68-cad2dbea33d1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1204.140239] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1ac95e1-390a-49a1-8a6f-d64561f3904a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.171237] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 961c3a87-7f53-4764-b8a4-40a408a30f90/961c3a87-7f53-4764-b8a4-40a408a30f90.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1204.173949] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-657dc1cf-929c-4a52-a0b6-71cacb9eac85 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.191551] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Waiting for the task: (returnval){ [ 1204.191551] env[63297]: value = "task-1696882" [ 1204.191551] env[63297]: _type = "Task" [ 1204.191551] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.198711] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1204.198711] env[63297]: value = "task-1696883" [ 1204.198711] env[63297]: _type = "Task" [ 1204.198711] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.208142] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696882, 'name': Rename_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.216695] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696883, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.270897] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696881, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.398490] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Releasing lock "refresh_cache-b5d34058-fa3e-4806-97e5-638bbbffaeb8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.398880] env[63297]: DEBUG nova.compute.manager [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Instance network_info: |[{"id": "e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c", "address": "fa:16:3e:ff:06:03", "network": {"id": "bfd15893-9bb1-46a3-bf31-db474ed0269a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1591634149-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734a95312d7d4da38201457d4f542a9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1fefdf1-1c", "ovs_interfaceid": "e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1204.399225] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:06:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1204.406952] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Creating folder: Project (734a95312d7d4da38201457d4f542a9e). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1204.407224] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76615bc9-d0e4-46c1-bd6f-e910f0693af0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.420825] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Created folder: Project (734a95312d7d4da38201457d4f542a9e) in parent group-v353718. [ 1204.421899] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Creating folder: Instances. Parent ref: group-v353756. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1204.421899] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62b87f26-10fe-4e40-84d2-2a09750f8073 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.430764] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Created folder: Instances in parent group-v353756. [ 1204.431007] env[63297]: DEBUG oslo.service.loopingcall [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1204.431254] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1204.432755] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e9876e7-f89d-4989-ab3c-c6a02d58c895 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.454655] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1204.454655] env[63297]: value = "task-1696886" [ 1204.454655] env[63297]: _type = "Task" [ 1204.454655] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.462899] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696886, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.478796] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520744a2-98e6-f5c0-901f-a8003f2b8caf, 'name': SearchDatastore_Task, 'duration_secs': 0.026021} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.479031] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.479606] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 9b1306f9-4b0a-4116-8e79-271478f33490/9b1306f9-4b0a-4116-8e79-271478f33490.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1204.479606] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-460a40d6-fe81-4551-bb59-502b4c82a4a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.485954] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1204.485954] env[63297]: value = "task-1696887" [ 1204.485954] env[63297]: _type = "Task" [ 1204.485954] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.498819] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1696887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.622703] env[63297]: DEBUG nova.network.neutron [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Updated VIF entry in instance network info cache for port 881aea0b-28e5-4b91-af8d-d9c7c69b6446. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1204.623161] env[63297]: DEBUG nova.network.neutron [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Updating instance_info_cache with network_info: [{"id": "01a6609e-6d94-400f-8f01-fc67889fb600", "address": "fa:16:3e:99:b9:3e", "network": {"id": "ce82ab69-5179-43bb-b12a-72dfa2219f9c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-242865171", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01a6609e-6d", "ovs_interfaceid": "01a6609e-6d94-400f-8f01-fc67889fb600", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6", "address": "fa:16:3e:f0:25:de", "network": {"id": "29ec6440-32c1-4238-a3e1-836811433d63", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1995634363", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6b65dc1-9b", "ovs_interfaceid": "f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "881aea0b-28e5-4b91-af8d-d9c7c69b6446", "address": "fa:16:3e:e3:c7:17", "network": {"id": "ce82ab69-5179-43bb-b12a-72dfa2219f9c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-242865171", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", 
"segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap881aea0b-28", "ovs_interfaceid": "881aea0b-28e5-4b91-af8d-d9c7c69b6446", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.625870] env[63297]: DEBUG nova.compute.utils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1204.632060] env[63297]: DEBUG nova.compute.manager [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1204.632181] env[63297]: DEBUG nova.network.neutron [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1204.674079] env[63297]: DEBUG nova.compute.manager [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Received event network-vif-plugged-3db68b1a-d911-4324-b993-dc755277e56b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1204.674376] env[63297]: DEBUG oslo_concurrency.lockutils [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] Acquiring lock "9b1306f9-4b0a-4116-8e79-271478f33490-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.674638] env[63297]: DEBUG oslo_concurrency.lockutils [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] Lock "9b1306f9-4b0a-4116-8e79-271478f33490-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.674878] env[63297]: DEBUG oslo_concurrency.lockutils [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] Lock "9b1306f9-4b0a-4116-8e79-271478f33490-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.675182] env[63297]: DEBUG nova.compute.manager [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] No waiting events found dispatching network-vif-plugged-3db68b1a-d911-4324-b993-dc755277e56b {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1204.675718] env[63297]: WARNING nova.compute.manager [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 
req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Received unexpected event network-vif-plugged-3db68b1a-d911-4324-b993-dc755277e56b for instance with vm_state building and task_state spawning. [ 1204.676366] env[63297]: DEBUG nova.compute.manager [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Received event network-changed-3db68b1a-d911-4324-b993-dc755277e56b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1204.676366] env[63297]: DEBUG nova.compute.manager [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Refreshing instance network info cache due to event network-changed-3db68b1a-d911-4324-b993-dc755277e56b. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1204.676366] env[63297]: DEBUG oslo_concurrency.lockutils [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] Acquiring lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1204.676562] env[63297]: DEBUG oslo_concurrency.lockutils [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] Acquired lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.677142] env[63297]: DEBUG nova.network.neutron [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Refreshing network info cache for port 3db68b1a-d911-4324-b993-dc755277e56b {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1204.712561] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696882, 'name': Rename_Task, 'duration_secs': 0.179135} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.716713] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1204.716713] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696883, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.716713] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6335b486-3f9e-4995-a7a0-06e259f07c72 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.723665] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Waiting for the task: (returnval){ [ 1204.723665] env[63297]: value = "task-1696888" [ 1204.723665] env[63297]: _type = "Task" [ 1204.723665] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.735084] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696888, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.753327] env[63297]: DEBUG nova.policy [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5adb25e08fd24f96a415514dba440a27', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '77d216fa01bf4dbea9d3047065ead0cd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1204.770620] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696881, 'name': CreateVM_Task, 'duration_secs': 0.5698} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.770815] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1204.771379] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1204.771573] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.771942] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1204.773661] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54368d3b-399a-464d-b18e-4401cfd482e6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.778369] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1204.778369] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b2ab86-8d19-4597-b549-d29b4e5d2098" [ 1204.778369] env[63297]: _type = "Task" [ 1204.778369] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.788044] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b2ab86-8d19-4597-b549-d29b4e5d2098, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.969126] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696886, 'name': CreateVM_Task, 'duration_secs': 0.423237} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.969338] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1204.970582] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.001195] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1696887, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.133395] env[63297]: DEBUG nova.compute.manager [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1205.142313] env[63297]: DEBUG oslo_concurrency.lockutils [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] Releasing lock "refresh_cache-961c3a87-7f53-4764-b8a4-40a408a30f90" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.142313] env[63297]: DEBUG nova.compute.manager [req-045ec814-fa6b-4614-9731-933623e89b11 req-1e083160-a3c6-4e42-8374-505ab334b449 service nova] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Received event network-vif-deleted-0f8d8353-c946-4413-9b67-19c002e27040 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1205.218446] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696883, 'name': ReconfigVM_Task, 'duration_secs': 0.523419} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.218735] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 961c3a87-7f53-4764-b8a4-40a408a30f90/961c3a87-7f53-4764-b8a4-40a408a30f90.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1205.219843] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a2ca9065-2190-43e7-a824-5a1271d2b6e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.230614] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1205.230614] env[63297]: value = "task-1696889" [ 1205.230614] env[63297]: _type = "Task" [ 1205.230614] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.238799] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696888, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.244603] env[63297]: DEBUG nova.compute.manager [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Received event network-vif-plugged-e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1205.244711] env[63297]: DEBUG oslo_concurrency.lockutils [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] Acquiring lock "b5d34058-fa3e-4806-97e5-638bbbffaeb8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.244850] env[63297]: DEBUG oslo_concurrency.lockutils [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] Lock "b5d34058-fa3e-4806-97e5-638bbbffaeb8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.245193] env[63297]: DEBUG oslo_concurrency.lockutils [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] Lock "b5d34058-fa3e-4806-97e5-638bbbffaeb8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.245452] env[63297]: DEBUG nova.compute.manager [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] No waiting events found dispatching 
network-vif-plugged-e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1205.245837] env[63297]: WARNING nova.compute.manager [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Received unexpected event network-vif-plugged-e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c for instance with vm_state building and task_state spawning. [ 1205.245837] env[63297]: DEBUG nova.compute.manager [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Received event network-changed-e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1205.245958] env[63297]: DEBUG nova.compute.manager [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Refreshing instance network info cache due to event network-changed-e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1205.246419] env[63297]: DEBUG oslo_concurrency.lockutils [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] Acquiring lock "refresh_cache-b5d34058-fa3e-4806-97e5-638bbbffaeb8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.246568] env[63297]: DEBUG oslo_concurrency.lockutils [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] Acquired lock "refresh_cache-b5d34058-fa3e-4806-97e5-638bbbffaeb8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.246728] env[63297]: DEBUG nova.network.neutron [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Refreshing network info cache for port e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1205.258726] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696889, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.291334] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b2ab86-8d19-4597-b549-d29b4e5d2098, 'name': SearchDatastore_Task, 'duration_secs': 0.047905} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.291970] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.292261] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1205.292521] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.292704] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.292887] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1205.293467] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.294234] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1205.294234] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-874d801a-c611-44a6-9bd3-0ce088887d86 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.297117] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-136ca92c-6609-46e1-be1a-9f1b93172032 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.305311] env[63297]: DEBUG 
oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1205.305311] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]521bee5e-8181-037f-0d21-9c001a9c87b7" [ 1205.305311] env[63297]: _type = "Task" [ 1205.305311] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.309811] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1205.310050] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1205.311093] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-380d70b5-7bea-4c90-b4fd-5d3ccb0299e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.316724] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521bee5e-8181-037f-0d21-9c001a9c87b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.321604] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1205.321604] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5273ffe9-3ca0-58c5-b58e-a6394cff9e83" [ 1205.321604] env[63297]: _type = "Task" [ 1205.321604] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.330916] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5273ffe9-3ca0-58c5-b58e-a6394cff9e83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.505497] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1696887, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563427} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.508487] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 9b1306f9-4b0a-4116-8e79-271478f33490/9b1306f9-4b0a-4116-8e79-271478f33490.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1205.508713] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1205.509198] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8699c6ac-8673-44a6-9f2a-5c05a60edc7f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.516622] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1205.516622] env[63297]: value = "task-1696890" [ 1205.516622] env[63297]: _type = "Task" [ 1205.516622] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.527887] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1696890, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.574777] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58816017-e815-4cca-83d7-080ee6fdc80a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.583421] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2dee12-ea65-4f5d-84c5-5efb7c60ef5e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.615281] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d42d8a-a812-436a-8370-3a2d01651454 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.625865] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb78524-c94f-4da8-af14-c9e2b9b7d819 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.644069] env[63297]: DEBUG nova.compute.provider_tree [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.660273] env[63297]: DEBUG nova.network.neutron [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Successfully created port: 20697777-dceb-47b1-8edc-c6f3abc08f0f {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1205.734212] env[63297]: DEBUG oslo_vmware.api [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1696888, 'name': PowerOnVM_Task, 'duration_secs': 0.898845} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.737839] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1205.738204] env[63297]: INFO nova.compute.manager [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Took 9.45 seconds to spawn the instance on the hypervisor. 
[ 1205.738325] env[63297]: DEBUG nova.compute.manager [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1205.739086] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd816be2-daee-41d3-93c5-8e5a81bdc456 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.747729] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696889, 'name': Rename_Task, 'duration_secs': 0.330682} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.750236] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1205.758568] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7abf002b-d6d9-4c62-ab01-6be96faf4c49 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.769332] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1205.769332] env[63297]: value = "task-1696891" [ 1205.769332] env[63297]: _type = "Task" [ 1205.769332] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.777717] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696891, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.819683] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521bee5e-8181-037f-0d21-9c001a9c87b7, 'name': SearchDatastore_Task, 'duration_secs': 0.039943} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.820034] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.820332] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1205.820576] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.832726] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5273ffe9-3ca0-58c5-b58e-a6394cff9e83, 'name': SearchDatastore_Task, 'duration_secs': 0.023007} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.833621] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3447dab6-6c2b-4d87-9a3a-dbb00dc270c9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.839541] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1205.839541] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5250d1bf-7df6-d352-7f3b-637115a07ea8" [ 1205.839541] env[63297]: _type = "Task" [ 1205.839541] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.848882] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5250d1bf-7df6-d352-7f3b-637115a07ea8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.879906] env[63297]: DEBUG nova.network.neutron [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updated VIF entry in instance network info cache for port 3db68b1a-d911-4324-b993-dc755277e56b. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1205.880343] env[63297]: DEBUG nova.network.neutron [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance_info_cache with network_info: [{"id": "3db68b1a-d911-4324-b993-dc755277e56b", "address": "fa:16:3e:c3:7e:ea", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3db68b1a-d9", "ovs_interfaceid": "3db68b1a-d911-4324-b993-dc755277e56b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.027584] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1696890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073475} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.027853] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1206.028821] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e72ee30-9e30-4f66-b933-5e7cbccd2534 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.055311] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 9b1306f9-4b0a-4116-8e79-271478f33490/9b1306f9-4b0a-4116-8e79-271478f33490.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1206.055795] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bacc719-4121-4b3a-b660-768f322419d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.077229] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1206.077229] env[63297]: value = "task-1696892" [ 1206.077229] env[63297]: _type = "Task" [ 1206.077229] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.086572] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1696892, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.147769] env[63297]: DEBUG nova.scheduler.client.report [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1206.155048] env[63297]: DEBUG nova.compute.manager [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1206.185031] env[63297]: DEBUG nova.virt.hardware [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1206.185276] env[63297]: DEBUG nova.virt.hardware [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1206.185436] env[63297]: DEBUG nova.virt.hardware [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1206.185616] env[63297]: DEBUG nova.virt.hardware [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1206.185760] env[63297]: DEBUG nova.virt.hardware [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1206.185905] env[63297]: DEBUG nova.virt.hardware [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1206.186128] env[63297]: DEBUG nova.virt.hardware [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1206.186288] env[63297]: DEBUG nova.virt.hardware [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1206.186480] env[63297]: DEBUG nova.virt.hardware [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1206.186738] env[63297]: DEBUG nova.virt.hardware [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1206.186978] env[63297]: DEBUG nova.virt.hardware [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1206.188220] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c19dd2-d95e-4794-906a-5b57d772e35a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.196969] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-decd987a-7318-4a51-847d-7948cffddb92 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.272968] env[63297]: INFO nova.compute.manager [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Took 22.45 seconds to build instance. [ 1206.280354] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696891, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.352747] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5250d1bf-7df6-d352-7f3b-637115a07ea8, 'name': SearchDatastore_Task, 'duration_secs': 0.017924} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.353124] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1206.353304] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb/13706c85-c23e-47cd-a7d8-2e902c11a7fb.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1206.353586] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.354099] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1206.354099] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d096026-e42c-4568-b35b-3c22038da856 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.356140] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a488aa1-4a4e-4a6f-bcdb-5694b0e95d52 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.366856] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1206.366856] env[63297]: value = "task-1696893" [ 1206.366856] env[63297]: _type = "Task" [ 1206.366856] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.368252] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1206.368959] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1206.376281] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee2c4dab-c76f-40d8-9eb3-bcfa8ae6b00f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.385385] env[63297]: DEBUG oslo_concurrency.lockutils [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] Releasing lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1206.385385] env[63297]: DEBUG nova.compute.manager [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Received event network-changed-14298cd9-8999-4142-9f1f-7a512e4a09d1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1206.385385] env[63297]: DEBUG nova.compute.manager [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Refreshing instance network info cache due to event network-changed-14298cd9-8999-4142-9f1f-7a512e4a09d1. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1206.385385] env[63297]: DEBUG oslo_concurrency.lockutils [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] Acquiring lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1206.385608] env[63297]: DEBUG oslo_concurrency.lockutils [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] Acquired lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.385608] env[63297]: DEBUG nova.network.neutron [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Refreshing network info cache for port 14298cd9-8999-4142-9f1f-7a512e4a09d1 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1206.395708] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1206.395708] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]521b28a8-5a45-281f-4f7b-214620d33dc8" [ 1206.395708] env[63297]: _type = "Task" [ 1206.395708] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.399165] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696893, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.408148] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521b28a8-5a45-281f-4f7b-214620d33dc8, 'name': SearchDatastore_Task, 'duration_secs': 0.012161} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.408252] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9c90be8-9826-4959-afbe-4d2e4ab21ee7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.413722] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1206.413722] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5225e5d0-e936-7357-528a-7b7c729d182c" [ 1206.413722] env[63297]: _type = "Task" [ 1206.413722] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.423030] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5225e5d0-e936-7357-528a-7b7c729d182c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.570792] env[63297]: DEBUG nova.network.neutron [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Updated VIF entry in instance network info cache for port e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1206.571697] env[63297]: DEBUG nova.network.neutron [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Updating instance_info_cache with network_info: [{"id": "e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c", "address": "fa:16:3e:ff:06:03", "network": {"id": "bfd15893-9bb1-46a3-bf31-db474ed0269a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1591634149-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734a95312d7d4da38201457d4f542a9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1fefdf1-1c", "ovs_interfaceid": "e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.589368] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1696892, 'name': ReconfigVM_Task, 'duration_secs': 0.326789} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.591712] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 9b1306f9-4b0a-4116-8e79-271478f33490/9b1306f9-4b0a-4116-8e79-271478f33490.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1206.592879] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-216c2c8b-a7ae-48ff-b3a3-6f764b902781 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.601951] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1206.601951] env[63297]: value = "task-1696894" [ 1206.601951] env[63297]: _type = "Task" [ 1206.601951] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.612682] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1696894, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.657348] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.542s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.657348] env[63297]: DEBUG nova.compute.manager [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1206.661575] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.611s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.663640] env[63297]: INFO nova.compute.claims [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1206.776702] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bd666e6-1870-4584-8c6c-286098c269f8 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Lock "ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.967s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.785713] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696891, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.887332] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696893, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.927279] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5225e5d0-e936-7357-528a-7b7c729d182c, 'name': SearchDatastore_Task, 'duration_secs': 0.011672} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.927561] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1206.928298] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b5d34058-fa3e-4806-97e5-638bbbffaeb8/b5d34058-fa3e-4806-97e5-638bbbffaeb8.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1206.928298] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c75bb3e0-29ec-4060-9c2e-2e8183fd13a0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.935484] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1206.935484] env[63297]: value = "task-1696895" [ 1206.935484] env[63297]: _type = "Task" [ 1206.935484] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.944046] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1696895, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.078295] env[63297]: DEBUG oslo_concurrency.lockutils [req-90442106-2c98-4882-98bb-1ad9dda4da66 req-a7eb493f-f703-45bb-bb6b-bcbfd4c07b35 service nova] Releasing lock "refresh_cache-b5d34058-fa3e-4806-97e5-638bbbffaeb8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1207.111934] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1696894, 'name': Rename_Task, 'duration_secs': 0.296146} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.112373] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1207.112655] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc00cdbd-0bc2-4c00-991e-c82712625925 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.119363] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1207.119363] env[63297]: value = "task-1696896" [ 1207.119363] env[63297]: _type = "Task" [ 1207.119363] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.128695] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1696896, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.164263] env[63297]: DEBUG nova.compute.utils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1207.165686] env[63297]: DEBUG nova.compute.manager [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1207.165885] env[63297]: DEBUG nova.network.neutron [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1207.244317] env[63297]: DEBUG nova.policy [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4764fa016f6470ba4d64743952cbee5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71764c2eb27d41208f88179e9c39c0bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1207.280856] env[63297]: DEBUG nova.compute.manager [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1207.283662] env[63297]: DEBUG oslo_vmware.api [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696891, 'name': PowerOnVM_Task, 'duration_secs': 1.127421} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.284265] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1207.287023] env[63297]: INFO nova.compute.manager [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Took 18.94 seconds to spawn the instance on the hypervisor. [ 1207.287023] env[63297]: DEBUG nova.compute.manager [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1207.287023] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1989fb-44ef-4d09-a2da-7dd7bcfcf228 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.386337] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696893, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546252} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.386505] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb/13706c85-c23e-47cd-a7d8-2e902c11a7fb.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1207.386744] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1207.386979] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df376845-3a86-4186-b096-fb968a694ad2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.395326] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1207.395326] env[63297]: value = "task-1696897" [ 1207.395326] env[63297]: _type = "Task" [ 1207.395326] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.405621] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696897, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.447828] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1696895, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.634255] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1696896, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.672138] env[63297]: DEBUG nova.compute.manager [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1207.734234] env[63297]: DEBUG nova.network.neutron [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updated VIF entry in instance network info cache for port 14298cd9-8999-4142-9f1f-7a512e4a09d1. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1207.734234] env[63297]: DEBUG nova.network.neutron [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updating instance_info_cache with network_info: [{"id": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "address": "fa:16:3e:d1:15:40", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14298cd9-89", "ovs_interfaceid": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.812776] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.816010] env[63297]: INFO nova.compute.manager [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Took 32.60 seconds to build instance. [ 1207.913100] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696897, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093765} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.913100] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1207.913100] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29503571-ca45-4d70-8e40-fb9e99bfb752 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.937828] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb/13706c85-c23e-47cd-a7d8-2e902c11a7fb.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1207.940979] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d63f35aa-a654-4dbe-b2b5-3116b5dab0e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.970048] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1696895, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.69587} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.971416] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b5d34058-fa3e-4806-97e5-638bbbffaeb8/b5d34058-fa3e-4806-97e5-638bbbffaeb8.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1207.971628] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1207.971943] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1207.971943] env[63297]: value = "task-1696898" [ 1207.971943] env[63297]: _type = "Task" [ 1207.971943] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.972146] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c06461c3-f89b-4346-823d-1a6d8ad41703 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.986312] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.987995] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1207.987995] env[63297]: value = "task-1696899" [ 1207.987995] env[63297]: _type = "Task" [ 1207.987995] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.000893] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1696899, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.105692] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e61287-ee83-4fbe-9568-ef22903c6c86 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.114570] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff1d4b0-e279-4b31-9d29-f3cd38740a24 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.155176] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eeed218-2b7b-4b96-900b-621447a77738 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.163075] env[63297]: DEBUG oslo_vmware.api [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1696896, 'name': PowerOnVM_Task, 'duration_secs': 0.580954} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.163529] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1208.163754] env[63297]: INFO nova.compute.manager [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Took 9.45 seconds to spawn the instance on the hypervisor. 
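The CopyVirtualDisk / ExtendVirtualDisk / ReconfigVM / PowerOnVM entries above all follow the same shape: the driver submits a vCenter task, and oslo.vmware's wait_for_task then polls the task's TaskInfo until it reports success or error, which is what produces the "progress is N%" and "completed successfully" lines interleaved through this log. A minimal sketch of that polling pattern, assuming a hypothetical get_task_info() callable standing in for the PropertyCollector round trips oslo.vmware actually issues (the TaskInfo states 'queued', 'running', 'success', 'error' are the standard vSphere ones):

import time

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    # Poll a vCenter task reference until it finishes or fails (sketch only).
    while True:
        info = get_task_info(task_ref)            # one properties fetch per poll
        if info.state in ('queued', 'running'):
            print("Task %s progress is %s%%" % (task_ref, info.progress or 0))
            time.sleep(poll_interval)
            continue
        if info.state == 'success':
            print("Task %s completed successfully" % task_ref)
            return info.result
        # state == 'error': surface the fault to the caller
        raise RuntimeError("Task %s failed: %s" % (task_ref, info.error))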
[ 1208.163947] env[63297]: DEBUG nova.compute.manager [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1208.164900] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6209a46a-e73f-4430-af39-b4c9252b4db2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.171433] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852cf43f-c67a-45f3-9077-cfce610b835c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.192290] env[63297]: DEBUG nova.compute.provider_tree [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1208.236821] env[63297]: DEBUG oslo_concurrency.lockutils [req-47cb2d25-a2b6-44df-b103-c92dec6bf3c6 req-61608ccc-9592-4b0e-83e9-74750e286456 service nova] Releasing lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1208.283723] env[63297]: DEBUG nova.network.neutron [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Successfully created port: 8cf1041d-9ff1-4cf4-808d-40d2edaf0e06 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1208.316586] env[63297]: DEBUG nova.compute.manager [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1208.317087] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5138db41-75ab-4c00-b615-2c6d490d1195 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "961c3a87-7f53-4764-b8a4-40a408a30f90" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.115s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.317960] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd081dd0-bb8b-47fc-812d-99c58b9e17ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.484859] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696898, 'name': ReconfigVM_Task, 'duration_secs': 0.315924} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.485414] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb/13706c85-c23e-47cd-a7d8-2e902c11a7fb.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1208.486076] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23b85ed7-3c3e-4bd5-9ec0-222158b1f114 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.492527] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1208.492527] env[63297]: value = "task-1696900" [ 1208.492527] env[63297]: _type = "Task" [ 1208.492527] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.501749] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1696899, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.153298} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.502471] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1208.503203] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9465a878-b8c9-4b08-bc98-c051787f5242 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.510283] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696900, 'name': Rename_Task} progress is 10%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.534071] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] b5d34058-fa3e-4806-97e5-638bbbffaeb8/b5d34058-fa3e-4806-97e5-638bbbffaeb8.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1208.534297] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35b8e496-0260-4843-b595-c0d15b18ea91 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.556694] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1208.556694] env[63297]: value = "task-1696901" [ 1208.556694] env[63297]: _type = "Task" [ 1208.556694] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.568271] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1696901, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.688478] env[63297]: INFO nova.compute.manager [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Took 23.20 seconds to build instance. [ 1208.695955] env[63297]: DEBUG nova.compute.manager [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1208.698716] env[63297]: DEBUG nova.scheduler.client.report [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1208.733231] env[63297]: DEBUG nova.virt.hardware [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1208.733505] env[63297]: DEBUG nova.virt.hardware [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1208.733673] env[63297]: DEBUG nova.virt.hardware [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1208.733865] env[63297]: DEBUG nova.virt.hardware [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1208.734217] env[63297]: DEBUG nova.virt.hardware [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1208.734217] env[63297]: DEBUG nova.virt.hardware [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1208.734485] env[63297]: DEBUG nova.virt.hardware [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1208.734664] env[63297]: DEBUG nova.virt.hardware [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1208.734860] env[63297]: DEBUG nova.virt.hardware [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1208.735061] env[63297]: DEBUG nova.virt.hardware [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1208.735263] env[63297]: DEBUG nova.virt.hardware [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1208.736531] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985d19fb-79c7-4446-8d7c-003ff91c94b3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.745440] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f71af9-f4e3-4e2c-a5cf-c1164cea4857 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.782904] env[63297]: DEBUG nova.network.neutron [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Successfully updated port: 20697777-dceb-47b1-8edc-c6f3abc08f0f {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1208.823508] env[63297]: DEBUG nova.compute.manager [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1208.832755] env[63297]: INFO nova.compute.manager [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] instance snapshotting [ 1208.835605] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d3033f-93d8-4bad-8959-00bbed7d75e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.863317] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f603b5f-750c-49b5-8044-c066d68209fd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.005676] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696900, 'name': Rename_Task, 'duration_secs': 0.136726} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.006175] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1209.006533] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-449d2f3b-cf4e-4f30-ac79-d4d43703851c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.014469] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1209.014469] env[63297]: value = "task-1696902" [ 1209.014469] env[63297]: _type = "Task" [ 1209.014469] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.021413] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696902, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.067275] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1696901, 'name': ReconfigVM_Task, 'duration_secs': 0.282283} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.067709] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Reconfigured VM instance instance-0000000c to attach disk [datastore1] b5d34058-fa3e-4806-97e5-638bbbffaeb8/b5d34058-fa3e-4806-97e5-638bbbffaeb8.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1209.068506] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d25cc4f4-239b-4232-a632-99a988860f54 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.076399] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1209.076399] env[63297]: value = "task-1696903" [ 1209.076399] env[63297]: _type = "Task" [ 1209.076399] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.085159] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1696903, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.190934] env[63297]: DEBUG oslo_concurrency.lockutils [None req-51701846-a0ef-48f5-a55e-5672d2581047 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "9b1306f9-4b0a-4116-8e79-271478f33490" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.717s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.204020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.542s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.204579] env[63297]: DEBUG nova.compute.manager [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1209.211080] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.172s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.211080] env[63297]: DEBUG nova.objects.instance [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63297) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1209.288238] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Acquiring lock "refresh_cache-6d290634-67e7-4fb4-9a88-3da6eca34d4b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1209.288511] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Acquired lock "refresh_cache-6d290634-67e7-4fb4-9a88-3da6eca34d4b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.288698] env[63297]: DEBUG nova.network.neutron [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1209.362854] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.373620] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1209.373961] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-48ca5895-0d3c-40fe-8c4d-70880f3f0b4b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.383034] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1209.383034] env[63297]: value = "task-1696904" [ 1209.383034] env[63297]: _type = "Task" [ 1209.383034] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.394311] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696904, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.525649] env[63297]: DEBUG oslo_vmware.api [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696902, 'name': PowerOnVM_Task, 'duration_secs': 0.465985} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.525946] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1209.527484] env[63297]: INFO nova.compute.manager [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Took 5.90 seconds to spawn the instance on the hypervisor. [ 1209.527484] env[63297]: DEBUG nova.compute.manager [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1209.530435] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d7223c-3388-4a46-b9d9-e600be08868a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.592742] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1696903, 'name': Rename_Task, 'duration_secs': 0.170025} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.598040] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1209.598040] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-348d62ac-6132-49cd-9f8c-6eac35b633d4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.605915] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1209.605915] env[63297]: value = "task-1696905" [ 1209.605915] env[63297]: _type = "Task" [ 1209.605915] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.616024] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1696905, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.617911] env[63297]: DEBUG nova.compute.manager [req-0256c9fd-525b-44ec-a46e-62bd2968a28b req-56b7ec9d-752e-416a-812d-aaafb508b514 service nova] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Received event network-vif-plugged-20697777-dceb-47b1-8edc-c6f3abc08f0f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1209.617911] env[63297]: DEBUG oslo_concurrency.lockutils [req-0256c9fd-525b-44ec-a46e-62bd2968a28b req-56b7ec9d-752e-416a-812d-aaafb508b514 service nova] Acquiring lock "6d290634-67e7-4fb4-9a88-3da6eca34d4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.618022] env[63297]: DEBUG oslo_concurrency.lockutils [req-0256c9fd-525b-44ec-a46e-62bd2968a28b req-56b7ec9d-752e-416a-812d-aaafb508b514 service nova] Lock "6d290634-67e7-4fb4-9a88-3da6eca34d4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.618148] env[63297]: DEBUG oslo_concurrency.lockutils [req-0256c9fd-525b-44ec-a46e-62bd2968a28b req-56b7ec9d-752e-416a-812d-aaafb508b514 service nova] Lock "6d290634-67e7-4fb4-9a88-3da6eca34d4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.618294] env[63297]: DEBUG nova.compute.manager [req-0256c9fd-525b-44ec-a46e-62bd2968a28b req-56b7ec9d-752e-416a-812d-aaafb508b514 service nova] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] No waiting events found dispatching network-vif-plugged-20697777-dceb-47b1-8edc-c6f3abc08f0f {{(pid=63297) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1209.618451] env[63297]: WARNING nova.compute.manager [req-0256c9fd-525b-44ec-a46e-62bd2968a28b req-56b7ec9d-752e-416a-812d-aaafb508b514 service nova] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Received unexpected event network-vif-plugged-20697777-dceb-47b1-8edc-c6f3abc08f0f for instance with vm_state building and task_state spawning. [ 1209.694502] env[63297]: DEBUG nova.compute.manager [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1209.717369] env[63297]: DEBUG nova.compute.utils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1209.721264] env[63297]: DEBUG nova.compute.manager [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1209.721264] env[63297]: DEBUG nova.network.neutron [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1209.834099] env[63297]: DEBUG nova.network.neutron [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1209.896987] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696904, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.059207] env[63297]: INFO nova.compute.manager [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Took 18.97 seconds to build instance. 
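The lock traces in this stretch (the "compute_resources" lock held by the resource tracker, the per-instance build locks held for tens of seconds, and the "refresh_cache-<uuid>" lock around the network info update) are emitted by oslo.concurrency's lockutils wrappers, which log the acquire, how long the caller waited, and the hold time on release. A small sketch of the two usual usage forms, with hypothetical function bodies standing in for the resource-tracker and cache-refresh work:

from oslo_concurrency import lockutils

# Decorator form: serialize all callers that touch the resource tracker,
# mirroring the "compute_resources" acquire/release pairs in the log.
@lockutils.synchronized('compute_resources')
def claim_instance(instance):
    pass  # update claimed CPU/RAM/disk for this instance (placeholder)

# Context-manager form: hold the per-instance cache lock only while the
# instance_info_cache is rebuilt, as the "refresh_cache-<uuid>" entries show.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild and store the network info cache (placeholder)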
[ 1210.087975] env[63297]: DEBUG nova.policy [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '077b7be132cb45bd98fafd1ca2cdde8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eeccf8ef3e5e42758835abff95dc73ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1210.123666] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1696905, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.226152] env[63297]: DEBUG nova.compute.manager [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1210.231560] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.232666] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e577e2c7-01d4-493c-9429-1d7b8c4d283e tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.022s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.234134] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.507s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.234246] env[63297]: DEBUG nova.objects.instance [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Lazy-loading 'resources' on Instance uuid 3d66ef2c-ac35-4eae-a205-6dd80ee564d1 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1210.404136] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696904, 'name': CreateSnapshot_Task, 'duration_secs': 0.762046} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.405812] env[63297]: DEBUG nova.network.neutron [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Updating instance_info_cache with network_info: [{"id": "20697777-dceb-47b1-8edc-c6f3abc08f0f", "address": "fa:16:3e:80:eb:51", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.224", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20697777-dc", "ovs_interfaceid": "20697777-dceb-47b1-8edc-c6f3abc08f0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.406628] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1210.407712] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3338968e-db3e-4c0a-a386-46450e752d8b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.563346] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ab763bf3-ae58-49d3-9feb-0934f04d4fa6 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lock "13706c85-c23e-47cd-a7d8-2e902c11a7fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.483s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.623439] env[63297]: DEBUG oslo_vmware.api [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1696905, 'name': PowerOnVM_Task, 'duration_secs': 0.701273} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.623782] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1210.624090] env[63297]: INFO nova.compute.manager [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Took 9.49 seconds to spawn the instance on the hypervisor. [ 1210.624509] env[63297]: DEBUG nova.compute.manager [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1210.625097] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ef4091-fe91-41ec-8133-eeac563491c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.907798] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Releasing lock "refresh_cache-6d290634-67e7-4fb4-9a88-3da6eca34d4b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.909674] env[63297]: DEBUG nova.compute.manager [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Instance network_info: |[{"id": "20697777-dceb-47b1-8edc-c6f3abc08f0f", "address": "fa:16:3e:80:eb:51", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.224", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20697777-dc", "ovs_interfaceid": "20697777-dceb-47b1-8edc-c6f3abc08f0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1210.909674] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 
tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:eb:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20697777-dceb-47b1-8edc-c6f3abc08f0f', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1210.920533] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Creating folder: Project (77d216fa01bf4dbea9d3047065ead0cd). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1210.925494] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2747849f-1879-48ca-8d51-f1c5194edc83 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.938775] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1210.939402] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-da2f63d3-fe1b-4423-873d-49f88df4f3fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.952960] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1210.952960] env[63297]: value = "task-1696907" [ 1210.952960] env[63297]: _type = "Task" [ 1210.952960] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.961833] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Created folder: Project (77d216fa01bf4dbea9d3047065ead0cd) in parent group-v353718. [ 1210.962837] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Creating folder: Instances. Parent ref: group-v353760. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1210.962837] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd2887ab-b146-4c18-95fc-f45c00569dd3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.974823] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696907, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.986478] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Created folder: Instances in parent group-v353760. [ 1210.986738] env[63297]: DEBUG oslo.service.loopingcall [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1210.986927] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1210.987150] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b80b913-a987-45ad-8e31-587a2f033670 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.013359] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1211.013359] env[63297]: value = "task-1696909" [ 1211.013359] env[63297]: _type = "Task" [ 1211.013359] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.022727] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696909, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.068739] env[63297]: DEBUG nova.compute.manager [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1211.150245] env[63297]: INFO nova.compute.manager [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Took 21.14 seconds to build instance. [ 1211.242916] env[63297]: DEBUG nova.compute.manager [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1211.263871] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b010744-29a7-46ef-b186-08f65b0d695c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.267259] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ffff422-53ab-453c-8a95-4149bb1db7db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.313275] env[63297]: DEBUG nova.virt.hardware [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1211.313580] env[63297]: DEBUG nova.virt.hardware [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1211.313758] env[63297]: DEBUG nova.virt.hardware [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1211.313992] env[63297]: DEBUG nova.virt.hardware [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1211.314921] env[63297]: DEBUG nova.virt.hardware [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1211.314921] env[63297]: DEBUG nova.virt.hardware [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1211.314921] env[63297]: DEBUG nova.virt.hardware [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1211.314921] env[63297]: DEBUG nova.virt.hardware [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1211.315109] env[63297]: DEBUG nova.virt.hardware [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1211.315283] env[63297]: DEBUG nova.virt.hardware [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1211.317555] env[63297]: DEBUG nova.virt.hardware [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1211.317555] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87ad5e8-3619-4bf8-bfee-63060b1f112c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.320280] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a084e45-3a4e-48af-b93e-df44f5a327ef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.343194] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73abb6bd-faa7-4810-a0dc-cf6e351f9974 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.357283] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ae87d1-4db7-4124-8ada-1b148be3b76c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.391948] env[63297]: DEBUG nova.compute.provider_tree [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1211.468951] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696907, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.532985] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696909, 'name': CreateVM_Task, 'duration_secs': 0.415518} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.533293] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1211.534134] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1211.534479] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.534824] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1211.536337] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ec956a0-dbe3-48ce-a982-b1948efc4544 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.541190] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Waiting for the task: (returnval){ [ 1211.541190] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5262c441-42d6-a997-b5ae-9a13e20e459d" [ 1211.541190] env[63297]: _type = "Task" [ 1211.541190] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.552960] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5262c441-42d6-a997-b5ae-9a13e20e459d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.603870] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.654321] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0ea7305-44ea-4c36-94ee-d8dd0a6361c0 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "b5d34058-fa3e-4806-97e5-638bbbffaeb8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.657s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.898158] env[63297]: DEBUG nova.scheduler.client.report [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1211.903237] env[63297]: DEBUG nova.network.neutron [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Successfully created port: 49d5fb68-a759-487e-b35a-545fe16b7625 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1211.967149] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696907, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.057111] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5262c441-42d6-a997-b5ae-9a13e20e459d, 'name': SearchDatastore_Task, 'duration_secs': 0.010261} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.057775] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1212.060997] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1212.060997] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1212.060997] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.060997] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1212.060997] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e15dc68-9648-4838-9333-ccde77b72539 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.068895] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1212.069104] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1212.070019] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7cb7603-c939-4e4d-873b-a9ca9946f24f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.076038] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Waiting for the task: (returnval){ [ 1212.076038] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5290e9f0-4994-2ff3-00dd-129d3ec02d1e" [ 1212.076038] env[63297]: _type = "Task" [ 1212.076038] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.085096] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5290e9f0-4994-2ff3-00dd-129d3ec02d1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.162864] env[63297]: DEBUG nova.compute.manager [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1212.409928] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.176s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.412438] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.246s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.412438] env[63297]: DEBUG nova.objects.instance [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Lazy-loading 'resources' on Instance uuid e5f198e8-2080-4e3e-8ad5-964b855d70ff {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.448766] env[63297]: INFO nova.scheduler.client.report [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Deleted allocations for instance 3d66ef2c-ac35-4eae-a205-6dd80ee564d1 [ 1212.471684] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696907, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.476758] env[63297]: DEBUG nova.network.neutron [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Successfully updated port: 8cf1041d-9ff1-4cf4-808d-40d2edaf0e06 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1212.542700] env[63297]: INFO nova.compute.manager [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Rebuilding instance [ 1212.596960] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5290e9f0-4994-2ff3-00dd-129d3ec02d1e, 'name': SearchDatastore_Task, 'duration_secs': 0.010022} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.599364] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72cd5440-5e28-4bcf-859f-49089c63b5d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.607306] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Waiting for the task: (returnval){ [ 1212.607306] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e86922-6dcf-692a-f8c8-23865a8d0155" [ 1212.607306] env[63297]: _type = "Task" [ 1212.607306] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.614456] env[63297]: DEBUG nova.compute.manager [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1212.615311] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba9af8d-570a-4086-aa1d-c2375b0a4689 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.625147] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e86922-6dcf-692a-f8c8-23865a8d0155, 'name': SearchDatastore_Task, 'duration_secs': 0.014663} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.626939] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1212.627932] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6d290634-67e7-4fb4-9a88-3da6eca34d4b/6d290634-67e7-4fb4-9a88-3da6eca34d4b.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1212.632058] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af3e1c7c-ed44-4004-991c-2f2624922836 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.643634] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Waiting for the task: (returnval){ [ 1212.643634] env[63297]: value = "task-1696910" [ 1212.643634] env[63297]: _type = "Task" [ 1212.643634] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.656803] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696910, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.690052] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.971729] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adef25bc-5383-45d7-a3d6-73dd8bea68f1 tempest-DeleteServersAdminTestJSON-1979667833 tempest-DeleteServersAdminTestJSON-1979667833-project-admin] Lock "3d66ef2c-ac35-4eae-a205-6dd80ee564d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.114s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.982399] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696907, 'name': CloneVM_Task, 'duration_secs': 1.633905} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.984697] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Created linked-clone VM from snapshot [ 1212.984697] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Acquiring lock "refresh_cache-35c68986-51b5-43ba-a076-aca3c86d68bc" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1212.984697] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Acquired lock "refresh_cache-35c68986-51b5-43ba-a076-aca3c86d68bc" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.984697] env[63297]: DEBUG nova.network.neutron [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1212.986835] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210e1d11-124f-49b9-9677-1a26ff3e60ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.995954] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Uploading image 662cfb39-9a79-4f39-9803-185270a2034a {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1213.014859] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1213.015563] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f27d1ea4-c333-4b3f-9f1c-1d64a7b02e1f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.029354] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1213.029354] env[63297]: value = "task-1696911" [ 1213.029354] env[63297]: _type = "Task" [ 1213.029354] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.039904] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696911, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.113307] env[63297]: DEBUG nova.compute.manager [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Received event network-changed-20697777-dceb-47b1-8edc-c6f3abc08f0f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1213.113307] env[63297]: DEBUG nova.compute.manager [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Refreshing instance network info cache due to event network-changed-20697777-dceb-47b1-8edc-c6f3abc08f0f. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1213.113307] env[63297]: DEBUG oslo_concurrency.lockutils [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] Acquiring lock "refresh_cache-6d290634-67e7-4fb4-9a88-3da6eca34d4b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1213.113307] env[63297]: DEBUG oslo_concurrency.lockutils [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] Acquired lock "refresh_cache-6d290634-67e7-4fb4-9a88-3da6eca34d4b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.113662] env[63297]: DEBUG nova.network.neutron [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Refreshing network info cache for port 20697777-dceb-47b1-8edc-c6f3abc08f0f {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1213.136968] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1213.137975] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfc2d401-3f69-404a-af9e-8357fb89fc8a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.145657] env[63297]: DEBUG oslo_concurrency.lockutils [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "961c3a87-7f53-4764-b8a4-40a408a30f90" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.146376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock 
"961c3a87-7f53-4764-b8a4-40a408a30f90" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.146796] env[63297]: DEBUG oslo_concurrency.lockutils [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "961c3a87-7f53-4764-b8a4-40a408a30f90-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.146884] env[63297]: DEBUG oslo_concurrency.lockutils [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "961c3a87-7f53-4764-b8a4-40a408a30f90-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.147039] env[63297]: DEBUG oslo_concurrency.lockutils [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "961c3a87-7f53-4764-b8a4-40a408a30f90-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.158714] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1213.158714] env[63297]: value = "task-1696912" [ 1213.158714] env[63297]: _type = "Task" [ 1213.158714] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.159132] env[63297]: INFO nova.compute.manager [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Terminating instance [ 1213.162877] env[63297]: DEBUG nova.compute.manager [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1213.163210] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1213.167831] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dde07d-aa23-4566-8ef8-f84178f4510e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.175277] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696910, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.181229] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696912, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.187957] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1213.188608] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e69ee47-bc93-4a58-b984-0d352d14e176 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.200484] env[63297]: DEBUG oslo_vmware.api [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1213.200484] env[63297]: value = "task-1696913" [ 1213.200484] env[63297]: _type = "Task" [ 1213.200484] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.215375] env[63297]: DEBUG oslo_vmware.api [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696913, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.440993] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d0e582-171b-478b-9d11-c86d8bcafe73 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.449402] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6dd0bd-108b-476c-8ce2-8a3733d79fa0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.480457] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aedf7585-c8f8-48af-896c-5302e5d493d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.489101] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677d4751-9b8c-420f-b41e-9b81eaca8524 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.507278] env[63297]: DEBUG nova.compute.provider_tree [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1213.544103] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696911, 'name': Destroy_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.583464] env[63297]: DEBUG nova.network.neutron [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1213.590992] env[63297]: DEBUG nova.compute.manager [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Stashing vm_state: active {{(pid=63297) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1214.294593] env[63297]: DEBUG nova.scheduler.client.report [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1214.318937] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696910, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.697631} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.319392] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.327128] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6d290634-67e7-4fb4-9a88-3da6eca34d4b/6d290634-67e7-4fb4-9a88-3da6eca34d4b.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1214.327128] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1214.327371] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696912, 'name': PowerOffVM_Task, 'duration_secs': 0.277589} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.327492] env[63297]: DEBUG oslo_vmware.api [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696913, 'name': PowerOffVM_Task, 'duration_secs': 0.386608} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.328101] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0a74640-6b95-4cf5-8c5a-2f20fd261f38 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.332904] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1214.332904] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1214.332904] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1214.332904] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1214.334349] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2ad4e8-75bb-4954-8337-f4c5e48bca83 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.337655] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f30af8ed-3d8c-479c-88f1-546296e83bd8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.342529] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696911, 'name': Destroy_Task, 'duration_secs': 0.587438} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.343548] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Destroyed the VM [ 1214.343650] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1214.344853] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c2943906-9b4a-48aa-9efc-ad761fa116e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.348399] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1214.349640] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f8c42cc-a597-4440-8bb0-5e276277f61c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.351241] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Waiting for the task: (returnval){ [ 1214.351241] env[63297]: value = "task-1696914" [ 1214.351241] env[63297]: _type = "Task" [ 1214.351241] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.352423] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1214.352423] env[63297]: value = "task-1696916" [ 1214.352423] env[63297]: _type = "Task" [ 1214.352423] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.366599] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696914, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.370515] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696916, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.382812] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1214.382812] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1214.382812] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Deleting the datastore file [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1214.382812] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c78852d-6859-4f22-858c-57b5d03d8df1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.391369] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1214.391369] env[63297]: value = "task-1696918" [ 1214.391369] env[63297]: _type = "Task" [ 1214.391369] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.401114] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696918, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.537888] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1214.538715] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1214.538715] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Deleting the datastore file [datastore1] 961c3a87-7f53-4764-b8a4-40a408a30f90 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1214.538886] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a8228ed-9a52-4eee-a597-c589fa0488ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.549734] env[63297]: DEBUG oslo_vmware.api [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1214.549734] env[63297]: value = "task-1696919" [ 1214.549734] env[63297]: _type = "Task" [ 1214.549734] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.563010] env[63297]: DEBUG oslo_vmware.api [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696919, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.584916] env[63297]: DEBUG nova.network.neutron [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Updated VIF entry in instance network info cache for port 20697777-dceb-47b1-8edc-c6f3abc08f0f. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1214.584916] env[63297]: DEBUG nova.network.neutron [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Updating instance_info_cache with network_info: [{"id": "20697777-dceb-47b1-8edc-c6f3abc08f0f", "address": "fa:16:3e:80:eb:51", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.224", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20697777-dc", "ovs_interfaceid": "20697777-dceb-47b1-8edc-c6f3abc08f0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.802540] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.390s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.805324] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.407s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.808743] env[63297]: INFO nova.compute.claims [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1214.845217] env[63297]: INFO nova.scheduler.client.report [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Deleted allocations for instance e5f198e8-2080-4e3e-8ad5-964b855d70ff [ 1214.875030] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696914, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075769} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.877027] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1214.878010] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696916, 'name': RemoveSnapshot_Task} progress is 30%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.879846] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a718c045-17af-435c-b23b-010011e616b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.905307] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 6d290634-67e7-4fb4-9a88-3da6eca34d4b/6d290634-67e7-4fb4-9a88-3da6eca34d4b.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1214.909361] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3c98326-e802-42f2-a2ff-968e255a5b42 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.924789] env[63297]: DEBUG nova.network.neutron [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Updating instance_info_cache with network_info: [{"id": "8cf1041d-9ff1-4cf4-808d-40d2edaf0e06", "address": "fa:16:3e:bc:60:fa", "network": {"id": "f7678fac-2ee8-4f1c-8731-678d3fa06e4d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-63536799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71764c2eb27d41208f88179e9c39c0bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cf1041d-9f", "ovs_interfaceid": "8cf1041d-9ff1-4cf4-808d-40d2edaf0e06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.934138] 
env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696918, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128014} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.935787] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1214.936054] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1214.936279] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1214.938797] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Waiting for the task: (returnval){ [ 1214.938797] env[63297]: value = "task-1696920" [ 1214.938797] env[63297]: _type = "Task" [ 1214.938797] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.948783] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696920, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.061227] env[63297]: DEBUG oslo_vmware.api [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1696919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.265647} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.061227] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1215.061227] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1215.061227] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1215.061227] env[63297]: INFO nova.compute.manager [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Took 1.90 seconds to destroy the instance on the hypervisor. [ 1215.061227] env[63297]: DEBUG oslo.service.loopingcall [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1215.061227] env[63297]: DEBUG nova.compute.manager [-] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1215.061227] env[63297]: DEBUG nova.network.neutron [-] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1215.089188] env[63297]: DEBUG oslo_concurrency.lockutils [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] Releasing lock "refresh_cache-6d290634-67e7-4fb4-9a88-3da6eca34d4b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1215.089344] env[63297]: DEBUG nova.compute.manager [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Received event network-vif-plugged-8cf1041d-9ff1-4cf4-808d-40d2edaf0e06 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1215.089547] env[63297]: DEBUG oslo_concurrency.lockutils [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] Acquiring lock "35c68986-51b5-43ba-a076-aca3c86d68bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.089759] env[63297]: DEBUG oslo_concurrency.lockutils [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] Lock 
"35c68986-51b5-43ba-a076-aca3c86d68bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.089967] env[63297]: DEBUG oslo_concurrency.lockutils [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] Lock "35c68986-51b5-43ba-a076-aca3c86d68bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.090178] env[63297]: DEBUG nova.compute.manager [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] No waiting events found dispatching network-vif-plugged-8cf1041d-9ff1-4cf4-808d-40d2edaf0e06 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1215.090355] env[63297]: WARNING nova.compute.manager [req-cc637db9-9eb1-42b3-9f87-2060b893f2b5 req-6e8a3fcf-cf0b-4093-93b8-a615d4f40299 service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Received unexpected event network-vif-plugged-8cf1041d-9ff1-4cf4-808d-40d2edaf0e06 for instance with vm_state building and task_state spawning. [ 1215.356984] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20bea38b-00d3-43c5-9e9c-cb05e5d1857f tempest-TenantUsagesTestJSON-2043461262 tempest-TenantUsagesTestJSON-2043461262-project-member] Lock "e5f198e8-2080-4e3e-8ad5-964b855d70ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.299s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.377539] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696916, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.431026] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Releasing lock "refresh_cache-35c68986-51b5-43ba-a076-aca3c86d68bc" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1215.431026] env[63297]: DEBUG nova.compute.manager [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Instance network_info: |[{"id": "8cf1041d-9ff1-4cf4-808d-40d2edaf0e06", "address": "fa:16:3e:bc:60:fa", "network": {"id": "f7678fac-2ee8-4f1c-8731-678d3fa06e4d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-63536799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71764c2eb27d41208f88179e9c39c0bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cf1041d-9f", "ovs_interfaceid": "8cf1041d-9ff1-4cf4-808d-40d2edaf0e06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1215.431026] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:60:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52f465cb-7418-4172-bd7d-aec00abeb692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8cf1041d-9ff1-4cf4-808d-40d2edaf0e06', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1215.439199] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Creating folder: Project (71764c2eb27d41208f88179e9c39c0bb). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1215.439630] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1ca0320-5aee-4dc2-90f5-16551849a009 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.456426] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696920, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.459845] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Created folder: Project (71764c2eb27d41208f88179e9c39c0bb) in parent group-v353718. [ 1215.460134] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Creating folder: Instances. Parent ref: group-v353764. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1215.460321] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b77d769e-f4ed-44d9-87ba-a6c3d3241677 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.471389] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Created folder: Instances in parent group-v353764. [ 1215.472083] env[63297]: DEBUG oslo.service.loopingcall [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1215.472083] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1215.472251] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33098ee6-f5b6-4712-983f-0ccf1d351458 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.494223] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1215.494223] env[63297]: value = "task-1696923" [ 1215.494223] env[63297]: _type = "Task" [ 1215.494223] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.503746] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696923, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.875479] env[63297]: DEBUG oslo_vmware.api [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696916, 'name': RemoveSnapshot_Task, 'duration_secs': 1.046146} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.877395] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1215.911688] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "8c10c573-de56-4c72-959a-65bf53b805a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.911913] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "8c10c573-de56-4c72-959a-65bf53b805a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.956838] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696920, 'name': ReconfigVM_Task, 'duration_secs': 0.524113} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.956838] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 6d290634-67e7-4fb4-9a88-3da6eca34d4b/6d290634-67e7-4fb4-9a88-3da6eca34d4b.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1215.958925] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7014821f-74c8-4b17-ad30-b8332963e8a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.966798] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Waiting for the task: (returnval){ [ 1215.966798] env[63297]: value = "task-1696924" [ 1215.966798] env[63297]: _type = "Task" [ 1215.966798] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.978396] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696924, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.000642] env[63297]: DEBUG nova.virt.hardware [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1216.000769] env[63297]: DEBUG nova.virt.hardware [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1216.000918] env[63297]: DEBUG nova.virt.hardware [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1216.001180] env[63297]: DEBUG nova.virt.hardware [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1216.001350] env[63297]: DEBUG nova.virt.hardware [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1216.003447] env[63297]: DEBUG nova.virt.hardware [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1216.003447] env[63297]: DEBUG nova.virt.hardware [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1216.003447] 
env[63297]: DEBUG nova.virt.hardware [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1216.003447] env[63297]: DEBUG nova.virt.hardware [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1216.003447] env[63297]: DEBUG nova.virt.hardware [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1216.003447] env[63297]: DEBUG nova.virt.hardware [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1216.004252] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61adf648-1bb4-4336-8fe9-e45bc4be035c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.014777] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696923, 'name': CreateVM_Task, 'duration_secs': 0.516464} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.017400] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1216.018108] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.018279] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.018684] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1216.019947] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00de2922-137b-4153-b161-f4a7bac80c05 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.027199] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48c828f2-2e19-4b91-afff-2655e968c72b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.040355] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Instance VIF info [] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1216.049680] env[63297]: DEBUG oslo.service.loopingcall [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1216.054012] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1216.054374] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Waiting for the task: (returnval){ [ 1216.054374] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d9b7f3-9adf-bc2d-cda3-e34c370ce41e" [ 1216.054374] env[63297]: _type = "Task" [ 1216.054374] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.055151] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43ca4b1c-2ed9-4fae-b4f4-0d7071ed756b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.080987] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d9b7f3-9adf-bc2d-cda3-e34c370ce41e, 'name': SearchDatastore_Task, 'duration_secs': 0.01598} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.082471] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1216.082668] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1216.082890] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.083049] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.083268] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1216.084464] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1216.084464] env[63297]: value = "task-1696925" [ 1216.084464] env[63297]: _type = "Task" [ 1216.084464] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.084464] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d011ccf5-1bf8-421a-96b4-5991ce79051a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.094347] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696925, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.098506] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1216.098506] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1216.099190] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a9c4d59-15b6-442c-8231-aec7108d67a0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.104051] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Waiting for the task: (returnval){ [ 1216.104051] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5293e229-dcbf-43be-1064-344cf3bbf0a1" [ 1216.104051] env[63297]: _type = "Task" [ 1216.104051] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.112359] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5293e229-dcbf-43be-1064-344cf3bbf0a1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.310018] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ceb72b6-8800-45de-879b-c57c969fee19 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.318877] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe56b98-05c7-4e93-be33-09701e8855cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.353988] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32188a95-4cdb-4ec3-8fd4-9f808cf5219c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.363925] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a69def0-9ab7-4c5c-aaa1-419af4e08f7f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.378330] env[63297]: DEBUG nova.compute.provider_tree [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1216.384259] env[63297]: WARNING nova.compute.manager [None req-46f80699-1c2b-443a-9684-5952cb09de07 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Image not found during snapshot: nova.exception.ImageNotFound: Image 662cfb39-9a79-4f39-9803-185270a2034a could not be found. [ 1216.473266] env[63297]: DEBUG nova.network.neutron [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Successfully updated port: 49d5fb68-a759-487e-b35a-545fe16b7625 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1216.479717] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696924, 'name': Rename_Task, 'duration_secs': 0.263732} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.480072] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1216.480845] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17c7c6a4-194f-48bc-b72c-9e4efd60d061 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.488561] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Waiting for the task: (returnval){ [ 1216.488561] env[63297]: value = "task-1696926" [ 1216.488561] env[63297]: _type = "Task" [ 1216.488561] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.500234] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696926, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.600029] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696925, 'name': CreateVM_Task, 'duration_secs': 0.499101} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.600029] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1216.600029] env[63297]: DEBUG oslo_concurrency.lockutils [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.600029] env[63297]: DEBUG oslo_concurrency.lockutils [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.600029] env[63297]: DEBUG oslo_concurrency.lockutils [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1216.600029] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60c381cc-f2c5-4565-95a9-480b1296589a {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.606905] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1216.606905] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a88c89-6bac-0f9e-dada-08795b4278a4" [ 1216.606905] env[63297]: _type = "Task" [ 1216.606905] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.628400] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a88c89-6bac-0f9e-dada-08795b4278a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.629085] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5293e229-dcbf-43be-1064-344cf3bbf0a1, 'name': SearchDatastore_Task, 'duration_secs': 0.01588} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.629943] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5de684fc-3421-478f-9427-516ae54b493a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.636103] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Waiting for the task: (returnval){ [ 1216.636103] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527966b0-d961-8f98-f755-a0c7b72b576e" [ 1216.636103] env[63297]: _type = "Task" [ 1216.636103] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.647513] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527966b0-d961-8f98-f755-a0c7b72b576e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.881695] env[63297]: DEBUG nova.scheduler.client.report [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1216.976145] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "refresh_cache-4438e230-0589-48ae-8848-d1f8414efa61" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.976308] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquired lock "refresh_cache-4438e230-0589-48ae-8848-d1f8414efa61" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.976473] env[63297]: DEBUG nova.network.neutron [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1217.002118] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696926, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.044410] env[63297]: DEBUG nova.compute.manager [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Received event network-changed-8cf1041d-9ff1-4cf4-808d-40d2edaf0e06 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1217.044600] env[63297]: DEBUG nova.compute.manager [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Refreshing instance network info cache due to event network-changed-8cf1041d-9ff1-4cf4-808d-40d2edaf0e06. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1217.044808] env[63297]: DEBUG oslo_concurrency.lockutils [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] Acquiring lock "refresh_cache-35c68986-51b5-43ba-a076-aca3c86d68bc" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1217.044947] env[63297]: DEBUG oslo_concurrency.lockutils [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] Acquired lock "refresh_cache-35c68986-51b5-43ba-a076-aca3c86d68bc" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.049250] env[63297]: DEBUG nova.network.neutron [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Refreshing network info cache for port 8cf1041d-9ff1-4cf4-808d-40d2edaf0e06 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1217.122794] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a88c89-6bac-0f9e-dada-08795b4278a4, 'name': SearchDatastore_Task, 'duration_secs': 0.025734} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.123476] env[63297]: DEBUG oslo_concurrency.lockutils [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1217.123713] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1217.123925] env[63297]: DEBUG oslo_concurrency.lockutils [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1217.150601] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527966b0-d961-8f98-f755-a0c7b72b576e, 'name': SearchDatastore_Task, 'duration_secs': 0.011159} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.151174] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1217.151224] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 35c68986-51b5-43ba-a076-aca3c86d68bc/35c68986-51b5-43ba-a076-aca3c86d68bc.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1217.152502] env[63297]: DEBUG oslo_concurrency.lockutils [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.152502] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1217.152502] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0afe2efe-25e6-4e34-8c2a-429e71735a52 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.154540] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-844e9e38-609e-474f-b670-9ee5e234fc0f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.162405] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Waiting for the task: (returnval){ [ 1217.162405] env[63297]: value = "task-1696927" [ 1217.162405] env[63297]: _type = "Task" [ 1217.162405] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.170336] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1217.170569] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1217.171738] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f301e87c-6670-4b27-8240-96b16329e744 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.179038] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1696927, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.181328] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1217.181328] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c35e5f-0758-f224-def1-8ee07fb54468" [ 1217.181328] env[63297]: _type = "Task" [ 1217.181328] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.190854] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c35e5f-0758-f224-def1-8ee07fb54468, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.382897] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.383249] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.387866] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.582s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.387866] env[63297]: DEBUG nova.compute.manager [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1217.391842] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.786s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.392933] env[63297]: INFO nova.compute.claims [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1217.446112] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "eebcad60-4b8a-4fa0-b846-b65972c4c69c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.446367] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "eebcad60-4b8a-4fa0-b846-b65972c4c69c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.504738] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "92439795-6240-4103-940b-de6d87738570" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.505141] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "92439795-6240-4103-940b-de6d87738570" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.505472] env[63297]: DEBUG oslo_vmware.api [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696926, 'name': PowerOnVM_Task, 'duration_secs': 0.995373} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.505774] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1217.506061] env[63297]: INFO nova.compute.manager [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Took 11.35 seconds to spawn the instance on the hypervisor. [ 1217.506318] env[63297]: DEBUG nova.compute.manager [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1217.507968] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae44e7d-ad0a-4fce-b1e8-b2f87c1b70c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.624035] env[63297]: DEBUG nova.network.neutron [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1217.678865] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1696927, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.703526] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c35e5f-0758-f224-def1-8ee07fb54468, 'name': SearchDatastore_Task, 'duration_secs': 0.017108} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.706888] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-383f68c8-6dcd-4c68-aceb-2eefb04d2bc1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.714192] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1217.714192] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e9902a-c938-5280-185a-25fde93562a6" [ 1217.714192] env[63297]: _type = "Task" [ 1217.714192] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.724539] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e9902a-c938-5280-185a-25fde93562a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.899022] env[63297]: DEBUG nova.compute.utils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1217.906473] env[63297]: DEBUG nova.compute.manager [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1217.906473] env[63297]: DEBUG nova.network.neutron [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1218.033346] env[63297]: INFO nova.compute.manager [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Took 25.06 seconds to build instance. [ 1218.176864] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1696927, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530056} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.177142] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 35c68986-51b5-43ba-a076-aca3c86d68bc/35c68986-51b5-43ba-a076-aca3c86d68bc.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1218.177389] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1218.177643] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8058f91f-c1bd-4d95-8a79-ff0dd3474e1f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.184740] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Waiting for the task: (returnval){ [ 1218.184740] env[63297]: value = "task-1696928" [ 1218.184740] env[63297]: _type = "Task" [ 1218.184740] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.194412] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1696928, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.198475] env[63297]: DEBUG nova.network.neutron [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Updating instance_info_cache with network_info: [{"id": "49d5fb68-a759-487e-b35a-545fe16b7625", "address": "fa:16:3e:7b:10:f8", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49d5fb68-a7", "ovs_interfaceid": "49d5fb68-a759-487e-b35a-545fe16b7625", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.228353] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e9902a-c938-5280-185a-25fde93562a6, 'name': SearchDatastore_Task, 'duration_secs': 0.011263} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.229867] env[63297]: DEBUG nova.policy [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1aa728f92b2c472faecd39da428eef56', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2a338e9883940f99ab4136eb4d102ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1218.231360] env[63297]: DEBUG oslo_concurrency.lockutils [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.231695] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb/13706c85-c23e-47cd-a7d8-2e902c11a7fb.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1218.231879] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef17b0c4-a51d-4b6d-adf1-d68e0c833ba2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.239560] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1218.239560] env[63297]: value = "task-1696929" [ 1218.239560] env[63297]: _type = "Task" [ 1218.239560] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.248416] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696929, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.275750] env[63297]: DEBUG nova.network.neutron [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Updated VIF entry in instance network info cache for port 8cf1041d-9ff1-4cf4-808d-40d2edaf0e06. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1218.276128] env[63297]: DEBUG nova.network.neutron [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Updating instance_info_cache with network_info: [{"id": "8cf1041d-9ff1-4cf4-808d-40d2edaf0e06", "address": "fa:16:3e:bc:60:fa", "network": {"id": "f7678fac-2ee8-4f1c-8731-678d3fa06e4d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-63536799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71764c2eb27d41208f88179e9c39c0bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cf1041d-9f", "ovs_interfaceid": "8cf1041d-9ff1-4cf4-808d-40d2edaf0e06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.342556] env[63297]: DEBUG nova.network.neutron [-] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.408769] env[63297]: DEBUG nova.compute.manager [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1218.536335] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7fc652d1-136a-43ca-b6ea-7cafac911ab9 tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Lock "6d290634-67e7-4fb4-9a88-3da6eca34d4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.579s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1218.698665] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1696928, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125471} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.702951] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1218.703577] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Releasing lock "refresh_cache-4438e230-0589-48ae-8848-d1f8414efa61" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.703893] env[63297]: DEBUG nova.compute.manager [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Instance network_info: |[{"id": "49d5fb68-a759-487e-b35a-545fe16b7625", "address": "fa:16:3e:7b:10:f8", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49d5fb68-a7", "ovs_interfaceid": "49d5fb68-a759-487e-b35a-545fe16b7625", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1218.705334] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84582fe-df38-4a6e-8437-8671d259916a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.708430] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:10:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '49d5fb68-a759-487e-b35a-545fe16b7625', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1218.716399] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Creating folder: Project 
(eeccf8ef3e5e42758835abff95dc73ec). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1218.719283] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4eb80354-50a8-4010-b3bb-55d390227e29 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.742022] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 35c68986-51b5-43ba-a076-aca3c86d68bc/35c68986-51b5-43ba-a076-aca3c86d68bc.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1218.746424] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-573ed297-8f4f-406f-a21d-257a8db2c62e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.762270] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Created folder: Project (eeccf8ef3e5e42758835abff95dc73ec) in parent group-v353718. [ 1218.762270] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Creating folder: Instances. Parent ref: group-v353768. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1218.765797] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f01f73a-0dd1-4997-93dd-60d77272bd60 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.773995] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497391} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.775663] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb/13706c85-c23e-47cd-a7d8-2e902c11a7fb.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1218.777126] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1218.777126] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Waiting for the task: (returnval){ [ 1218.777126] env[63297]: value = "task-1696931" [ 1218.777126] env[63297]: _type = "Task" [ 1218.777126] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.777512] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4a5eedf7-8ac6-40c8-9c77-1a96135da373 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.779736] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Created folder: Instances in parent group-v353768. [ 1218.779784] env[63297]: DEBUG oslo.service.loopingcall [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1218.782512] env[63297]: DEBUG oslo_concurrency.lockutils [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] Releasing lock "refresh_cache-35c68986-51b5-43ba-a076-aca3c86d68bc" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.782512] env[63297]: DEBUG nova.compute.manager [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Received event network-changed-ba02b0f2-d414-4714-b54a-10f89df1af3a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1218.782512] env[63297]: DEBUG nova.compute.manager [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Refreshing instance network info cache due to event network-changed-ba02b0f2-d414-4714-b54a-10f89df1af3a. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1218.782512] env[63297]: DEBUG oslo_concurrency.lockutils [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] Acquiring lock "refresh_cache-ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1218.782512] env[63297]: DEBUG oslo_concurrency.lockutils [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] Acquired lock "refresh_cache-ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.782512] env[63297]: DEBUG nova.network.neutron [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Refreshing network info cache for port ba02b0f2-d414-4714-b54a-10f89df1af3a {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1218.786863] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1218.787823] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb4b2df2-3051-43a5-af7c-1606cb575943 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.811573] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1218.811573] env[63297]: value = "task-1696933" [ 1218.811573] env[63297]: _type = "Task" [ 1218.811573] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.820343] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1218.820343] env[63297]: value = "task-1696934" [ 1218.820343] env[63297]: _type = "Task" [ 1218.820343] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.821326] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1696931, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.827980] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696933, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.835196] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696934, 'name': CreateVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.845618] env[63297]: INFO nova.compute.manager [-] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Took 3.78 seconds to deallocate network for instance. 
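The disk-copy, extend and reconfigure records above all follow the same oslo.vmware pattern: invoke a vSphere method that returns a Task managed object, then poll that task until vCenter reports success (the recurring "_poll_task ... progress is N%" lines). Below is a minimal Python sketch of that pattern, assuming a reachable vCenter; the host, credentials and datastore paths are placeholders, and the CopyVirtualDisk_Task keyword arguments follow the vSphere SDK rather than Nova's exact call.

    from oslo_vmware import api

    # Placeholder endpoint and credentials; api_retry_count and
    # task_poll_interval control the retry and polling cadence seen above.
    session = api.VMwareAPISession(
        'vc.example.test', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vim = session.vim
    disk_mgr = vim.service_content.virtualDiskManager

    # Invoking the method returns a Task reference immediately; the copy
    # itself runs server-side on vCenter/ESX.
    task = session.invoke_api(
        vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/IMAGE/IMAGE.vmdk',
        destName='[datastore1] INSTANCE/INSTANCE.vmdk')

    # wait_for_task() polls TaskInfo (producing the "progress is N%" DEBUG
    # lines) and raises if vCenter reports the task ended in error.
    task_info = session.wait_for_task(task)
    print(task_info.state)  # 'success' once the copy has completed
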
[ 1218.989037] env[63297]: DEBUG nova.network.neutron [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Successfully created port: e756a23f-dc86-4ac9-b42f-47196093abd3 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1219.012247] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecee12f-87d1-4a10-897a-a2047080f85c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.020627] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7345548b-6e1b-48f2-8128-fdcef6544a5a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.061201] env[63297]: DEBUG nova.compute.manager [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1219.064335] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4303b808-6a65-49f8-81ed-8ce2ac580a1c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.072227] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce00f9fa-8f7b-4fb8-9010-0f8737ca123f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.090833] env[63297]: DEBUG nova.compute.provider_tree [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.299849] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1696931, 'name': ReconfigVM_Task, 'duration_secs': 0.326455} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.299849] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 35c68986-51b5-43ba-a076-aca3c86d68bc/35c68986-51b5-43ba-a076-aca3c86d68bc.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1219.299849] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1919e6b-d241-421c-a084-bc260497b392 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.309142] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Waiting for the task: (returnval){ [ 1219.309142] env[63297]: value = "task-1696935" [ 1219.309142] env[63297]: _type = "Task" [ 1219.309142] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.332280] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1696935, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.341313] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696933, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073601} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.343926] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1219.343926] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb830b49-083d-4737-9f16-b15372952bb7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.350089] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696934, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.351997] env[63297]: DEBUG oslo_concurrency.lockutils [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.370252] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb/13706c85-c23e-47cd-a7d8-2e902c11a7fb.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1219.373513] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e6c29df-399f-4a34-b4f0-cade31f79a65 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.401943] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1219.401943] env[63297]: value = "task-1696936" [ 1219.401943] env[63297]: _type = "Task" [ 1219.401943] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.411153] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696936, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.419631] env[63297]: DEBUG nova.compute.manager [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1219.447114] env[63297]: DEBUG nova.virt.hardware [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1219.447114] env[63297]: DEBUG nova.virt.hardware [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1219.447114] env[63297]: DEBUG nova.virt.hardware [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1219.447114] env[63297]: DEBUG nova.virt.hardware [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1219.447520] env[63297]: DEBUG nova.virt.hardware [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1219.447864] env[63297]: DEBUG nova.virt.hardware [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1219.448225] env[63297]: DEBUG nova.virt.hardware [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1219.448506] env[63297]: DEBUG nova.virt.hardware [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1219.448936] env[63297]: DEBUG 
nova.virt.hardware [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1219.449266] env[63297]: DEBUG nova.virt.hardware [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1219.449566] env[63297]: DEBUG nova.virt.hardware [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1219.450555] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561a47c8-e3df-4c69-af0a-c8db4a1e8843 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.463287] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce39f2c-3a37-4473-9f89-db43506c69c9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.594998] env[63297]: DEBUG nova.scheduler.client.report [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1219.603624] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.829417] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1696935, 'name': Rename_Task, 'duration_secs': 0.150708} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.829417] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1219.830168] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a013df16-e548-486a-9a26-8fd27f45d508 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.837109] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "ef3346b1-ce09-4616-bdf4-200ea31efd01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.837348] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "ef3346b1-ce09-4616-bdf4-200ea31efd01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.837545] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "ef3346b1-ce09-4616-bdf4-200ea31efd01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.837805] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "ef3346b1-ce09-4616-bdf4-200ea31efd01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.837897] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "ef3346b1-ce09-4616-bdf4-200ea31efd01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.842498] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Waiting for the task: (returnval){ [ 1219.842498] env[63297]: value = "task-1696937" [ 1219.842498] env[63297]: _type = "Task" [ 1219.842498] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.843420] env[63297]: INFO nova.compute.manager [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Terminating instance [ 1219.848429] env[63297]: DEBUG nova.compute.manager [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1219.848619] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1219.856288] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd45cb8b-590d-4505-b96f-bf43be32b891 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.856288] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696934, 'name': CreateVM_Task, 'duration_secs': 0.729372} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.857277] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1219.858735] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1219.858735] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.858735] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1219.865389] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2392266e-12de-4075-a117-2ab00c27a2c9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.865389] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 
tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1696937, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.870793] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1219.871651] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63d597cd-3c34-4c9a-958f-0330c55b1d41 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.875257] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1219.875257] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52649861-b943-065e-0959-504d3d284179" [ 1219.875257] env[63297]: _type = "Task" [ 1219.875257] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.884033] env[63297]: DEBUG oslo_vmware.api [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1219.884033] env[63297]: value = "task-1696938" [ 1219.884033] env[63297]: _type = "Task" [ 1219.884033] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.893284] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52649861-b943-065e-0959-504d3d284179, 'name': SearchDatastore_Task, 'duration_secs': 0.009447} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.893977] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1219.894325] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1219.894583] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1219.894724] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.894923] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1219.895232] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c029305f-90f1-467e-935c-1b0036f28c58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.900816] env[63297]: DEBUG oslo_vmware.api [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696938, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.907749] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1219.907823] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1219.909100] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bab2fc26-7313-4ecd-90e4-4ad3cac8bdca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.914801] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696936, 'name': ReconfigVM_Task, 'duration_secs': 0.296263} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.916176] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb/13706c85-c23e-47cd-a7d8-2e902c11a7fb.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1219.916176] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f7ce3f8-4a12-4c36-8475-6b67f59754c9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.919027] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1219.919027] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]525dede6-bf93-db46-f276-6b7884153049" [ 1219.919027] env[63297]: _type = "Task" [ 1219.919027] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.924613] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1219.924613] env[63297]: value = "task-1696939" [ 1219.924613] env[63297]: _type = "Task" [ 1219.924613] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.927714] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525dede6-bf93-db46-f276-6b7884153049, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.936772] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696939, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.002746] env[63297]: DEBUG nova.network.neutron [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Updated VIF entry in instance network info cache for port ba02b0f2-d414-4714-b54a-10f89df1af3a. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1220.002746] env[63297]: DEBUG nova.network.neutron [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Updating instance_info_cache with network_info: [{"id": "ba02b0f2-d414-4714-b54a-10f89df1af3a", "address": "fa:16:3e:4d:c6:d2", "network": {"id": "8be91b0b-92c2-4755-a194-c3063691c530", "bridge": "br-int", "label": "tempest-ServersTestJSON-2064421146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f506a4251d434aaf92405307f98a67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba02b0f2-d4", "ovs_interfaceid": "ba02b0f2-d414-4714-b54a-10f89df1af3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.111027] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.719s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.111948] env[63297]: DEBUG nova.compute.manager [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1220.117193] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.605s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.117433] env[63297]: DEBUG nova.objects.instance [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Lazy-loading 'resources' on Instance uuid 8adfd26f-1012-4e52-9371-e9d3f654046c {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1220.121592] env[63297]: DEBUG nova.compute.manager [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Received event network-vif-plugged-49d5fb68-a759-487e-b35a-545fe16b7625 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1220.121592] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] Acquiring lock "4438e230-0589-48ae-8848-d1f8414efa61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.121592] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] Lock "4438e230-0589-48ae-8848-d1f8414efa61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.121592] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] Lock "4438e230-0589-48ae-8848-d1f8414efa61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.121592] env[63297]: DEBUG nova.compute.manager [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] No waiting events found dispatching network-vif-plugged-49d5fb68-a759-487e-b35a-545fe16b7625 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1220.122124] env[63297]: WARNING nova.compute.manager [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Received unexpected event network-vif-plugged-49d5fb68-a759-487e-b35a-545fe16b7625 for instance with vm_state building and task_state spawning. 
[ 1220.122124] env[63297]: DEBUG nova.compute.manager [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Received event network-changed-49d5fb68-a759-487e-b35a-545fe16b7625 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1220.122124] env[63297]: DEBUG nova.compute.manager [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Refreshing instance network info cache due to event network-changed-49d5fb68-a759-487e-b35a-545fe16b7625. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1220.122870] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] Acquiring lock "refresh_cache-4438e230-0589-48ae-8848-d1f8414efa61" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1220.122870] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] Acquired lock "refresh_cache-4438e230-0589-48ae-8848-d1f8414efa61" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.122870] env[63297]: DEBUG nova.network.neutron [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Refreshing network info cache for port 49d5fb68-a759-487e-b35a-545fe16b7625 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1220.161126] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Acquiring lock "87fa97a7-a8a5-4184-b52a-b02ad5468127" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.161391] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Lock "87fa97a7-a8a5-4184-b52a-b02ad5468127" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.358047] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1696937, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.398644] env[63297]: DEBUG oslo_vmware.api [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696938, 'name': PowerOffVM_Task, 'duration_secs': 0.216999} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.399043] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1220.399293] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1220.399620] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0218bcc2-ed5b-430b-8af6-70c32b14583e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.430490] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525dede6-bf93-db46-f276-6b7884153049, 'name': SearchDatastore_Task, 'duration_secs': 0.009408} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.434853] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b555cc5-17ed-4063-b51f-a96499233eea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.443042] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696939, 'name': Rename_Task, 'duration_secs': 0.161532} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.444467] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1220.444881] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1220.444881] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5211858d-c618-52b0-8b8f-91fabac65899" [ 1220.444881] env[63297]: _type = "Task" [ 1220.444881] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.445372] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ceb9af1-f211-4c5c-9745-21e08852fb61 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.459896] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5211858d-c618-52b0-8b8f-91fabac65899, 'name': SearchDatastore_Task, 'duration_secs': 0.009765} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.462126] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1220.462543] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 4438e230-0589-48ae-8848-d1f8414efa61/4438e230-0589-48ae-8848-d1f8414efa61.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1220.463036] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1220.463036] env[63297]: value = "task-1696941" [ 1220.463036] env[63297]: _type = "Task" [ 1220.463036] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.463478] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0838c8b8-135f-43f1-9ea8-af734ae97aec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.475055] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696941, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.479569] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1220.479569] env[63297]: value = "task-1696942" [ 1220.479569] env[63297]: _type = "Task" [ 1220.479569] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.504661] env[63297]: DEBUG oslo_concurrency.lockutils [req-fc017f39-888b-4190-9b15-8ece801fe631 req-1262e26b-dbd4-4098-a5eb-85ccc093fee0 service nova] Releasing lock "refresh_cache-ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1220.506999] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1220.507353] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1220.507433] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Deleting the datastore file [datastore1] ef3346b1-ce09-4616-bdf4-200ea31efd01 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1220.507987] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e69b03d3-714d-47a8-ba0b-a5e65790f76a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.520968] env[63297]: DEBUG oslo_vmware.api [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1220.520968] env[63297]: value = "task-1696943" [ 1220.520968] env[63297]: _type = "Task" [ 1220.520968] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.530246] env[63297]: DEBUG oslo_vmware.api [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696943, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.633885] env[63297]: DEBUG nova.compute.utils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1220.639265] env[63297]: DEBUG nova.compute.manager [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1220.639372] env[63297]: DEBUG nova.network.neutron [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1220.812465] env[63297]: DEBUG nova.policy [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '077b7be132cb45bd98fafd1ca2cdde8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eeccf8ef3e5e42758835abff95dc73ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1220.860218] env[63297]: DEBUG oslo_vmware.api [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1696937, 'name': PowerOnVM_Task, 'duration_secs': 0.525126} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.860572] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1220.860773] env[63297]: INFO nova.compute.manager [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Took 12.16 seconds to spawn the instance on the hypervisor. [ 1220.860952] env[63297]: DEBUG nova.compute.manager [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1220.862071] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671c5fc1-2d23-4197-9781-426096fabaa6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.984774] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696941, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.998918] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696942, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.033765] env[63297]: DEBUG oslo_vmware.api [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1696943, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163523} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.035314] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1221.035314] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1221.035314] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1221.035314] env[63297]: INFO nova.compute.manager [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1221.035314] env[63297]: DEBUG oslo.service.loopingcall [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1221.035314] env[63297]: DEBUG nova.compute.manager [-] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1221.035694] env[63297]: DEBUG nova.network.neutron [-] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1221.124778] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d17598a-a73c-4595-b1d3-4d4ce4af9c68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.143405] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4029afc-f0a4-4d3c-94e2-738c187103d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.147351] env[63297]: DEBUG nova.compute.manager [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1221.177062] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11608999-2f32-4220-8b7b-2a9c56cfb705 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.186907] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c16cd7a-b42c-4ca7-b863-e6f0e6c4dae2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.201602] env[63297]: DEBUG nova.compute.provider_tree [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1221.392640] env[63297]: INFO nova.compute.manager [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Took 27.82 seconds to build instance. [ 1221.477816] env[63297]: DEBUG oslo_vmware.api [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696941, 'name': PowerOnVM_Task, 'duration_secs': 0.727324} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.478780] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1221.478780] env[63297]: DEBUG nova.compute.manager [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1221.479191] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a255f8-4872-4a58-90c8-88282e3e818b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.500715] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696942, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544719} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.501036] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 4438e230-0589-48ae-8848-d1f8414efa61/4438e230-0589-48ae-8848-d1f8414efa61.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1221.501228] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1221.501488] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3264c0a3-964c-429d-bfc5-59dac9c966a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.509309] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1221.509309] env[63297]: value = "task-1696944" [ 1221.509309] env[63297]: _type = "Task" [ 1221.509309] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.520628] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696944, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.550835] env[63297]: DEBUG oslo_concurrency.lockutils [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Acquiring lock "6d290634-67e7-4fb4-9a88-3da6eca34d4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.550835] env[63297]: DEBUG oslo_concurrency.lockutils [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Lock "6d290634-67e7-4fb4-9a88-3da6eca34d4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.003s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.550835] env[63297]: DEBUG oslo_concurrency.lockutils [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Acquiring lock "6d290634-67e7-4fb4-9a88-3da6eca34d4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.550835] env[63297]: DEBUG oslo_concurrency.lockutils [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Lock "6d290634-67e7-4fb4-9a88-3da6eca34d4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.550835] env[63297]: DEBUG oslo_concurrency.lockutils [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Lock "6d290634-67e7-4fb4-9a88-3da6eca34d4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1221.554936] env[63297]: INFO nova.compute.manager [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Terminating instance [ 1221.559314] env[63297]: DEBUG nova.compute.manager [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1221.559454] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.565426] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61229e13-ddd0-4993-84e9-01148b515b99 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.568657] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.568912] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a90724f0-af00-4d25-8dd5-87ba5da57acb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.576201] env[63297]: DEBUG oslo_vmware.api [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Waiting for the task: (returnval){ [ 1221.576201] env[63297]: value = "task-1696945" [ 1221.576201] env[63297]: _type = "Task" [ 1221.576201] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.592067] env[63297]: DEBUG oslo_vmware.api [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696945, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.704152] env[63297]: DEBUG nova.scheduler.client.report [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1221.713308] env[63297]: DEBUG nova.network.neutron [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Updated VIF entry in instance network info cache for port 49d5fb68-a759-487e-b35a-545fe16b7625. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1221.713308] env[63297]: DEBUG nova.network.neutron [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Updating instance_info_cache with network_info: [{"id": "49d5fb68-a759-487e-b35a-545fe16b7625", "address": "fa:16:3e:7b:10:f8", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49d5fb68-a7", "ovs_interfaceid": "49d5fb68-a759-487e-b35a-545fe16b7625", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.796844] env[63297]: DEBUG nova.network.neutron [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Successfully updated port: e756a23f-dc86-4ac9-b42f-47196093abd3 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1221.801548] env[63297]: DEBUG nova.network.neutron [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Successfully created port: ca5923c7-8c5e-4d13-8abd-34373c2d8dd5 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1221.897130] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39d05302-9905-4471-b2c3-e2b96c79a98d tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Lock "35c68986-51b5-43ba-a076-aca3c86d68bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.338s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.009207] env[63297]: DEBUG oslo_concurrency.lockutils [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.023554] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696944, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070471} completed 
successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.023554] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1222.024208] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8814f20-42af-4712-bed3-ac24d6a1aada {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.054872] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 4438e230-0589-48ae-8848-d1f8414efa61/4438e230-0589-48ae-8848-d1f8414efa61.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1222.055766] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eedf66e5-2282-43a6-aafc-e7aaeaa6c640 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.080253] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1222.080253] env[63297]: value = "task-1696946" [ 1222.080253] env[63297]: _type = "Task" [ 1222.080253] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.092882] env[63297]: DEBUG oslo_vmware.api [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696945, 'name': PowerOffVM_Task, 'duration_secs': 0.343814} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.095348] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.095902] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1222.096224] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696946, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.097034] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-614f7b46-f121-4c7d-9f44-b9ca7aae8270 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.163496] env[63297]: DEBUG nova.compute.manager [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1222.199593] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1222.200113] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1222.200365] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Deleting the datastore file [datastore1] 6d290634-67e7-4fb4-9a88-3da6eca34d4b {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.202857] env[63297]: DEBUG nova.virt.hardware [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1222.203353] env[63297]: DEBUG nova.virt.hardware [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1222.203593] env[63297]: DEBUG nova.virt.hardware [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Image limits 0:0:0 {{(pid=63297) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1222.203836] env[63297]: DEBUG nova.virt.hardware [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1222.204027] env[63297]: DEBUG nova.virt.hardware [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1222.204217] env[63297]: DEBUG nova.virt.hardware [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1222.204636] env[63297]: DEBUG nova.virt.hardware [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1222.204856] env[63297]: DEBUG nova.virt.hardware [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1222.205091] env[63297]: DEBUG nova.virt.hardware [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1222.205304] env[63297]: DEBUG nova.virt.hardware [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1222.205517] env[63297]: DEBUG nova.virt.hardware [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1222.205825] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ccf8258-89db-4e64-9efe-0984b09d71b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.208821] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1a36f1-a262-450f-85da-3e9466177366 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.212925] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] Releasing lock 
"refresh_cache-4438e230-0589-48ae-8848-d1f8414efa61" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1222.213213] env[63297]: DEBUG nova.compute.manager [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Received event network-vif-deleted-881aea0b-28e5-4b91-af8d-d9c7c69b6446 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1222.213458] env[63297]: DEBUG nova.compute.manager [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Received event network-changed-e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1222.213666] env[63297]: DEBUG nova.compute.manager [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Refreshing instance network info cache due to event network-changed-e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1222.213908] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] Acquiring lock "refresh_cache-b5d34058-fa3e-4806-97e5-638bbbffaeb8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1222.214361] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] Acquired lock "refresh_cache-b5d34058-fa3e-4806-97e5-638bbbffaeb8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.214361] env[63297]: DEBUG nova.network.neutron [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Refreshing network info cache for port e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1222.215942] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.099s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.222065] env[63297]: DEBUG oslo_concurrency.lockutils [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.407s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1222.222065] env[63297]: DEBUG nova.objects.instance [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Lazy-loading 'resources' on Instance uuid 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1222.223045] env[63297]: DEBUG oslo_vmware.api 
[None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Waiting for the task: (returnval){ [ 1222.223045] env[63297]: value = "task-1696948" [ 1222.223045] env[63297]: _type = "Task" [ 1222.223045] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.224840] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89252c76-347c-4d53-a75b-72e91df9e82d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.252670] env[63297]: DEBUG oslo_vmware.api [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696948, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.253013] env[63297]: INFO nova.scheduler.client.report [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Deleted allocations for instance 8adfd26f-1012-4e52-9371-e9d3f654046c [ 1222.299693] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Acquiring lock "refresh_cache-746742ac-8d7a-466b-8bc0-043cb5422111" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1222.299693] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Acquired lock "refresh_cache-746742ac-8d7a-466b-8bc0-043cb5422111" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.299693] env[63297]: DEBUG nova.network.neutron [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1222.401187] env[63297]: DEBUG nova.compute.manager [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1222.494889] env[63297]: DEBUG nova.network.neutron [-] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.593478] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696946, 'name': ReconfigVM_Task} progress is 99%. 
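The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triplets above (for names like "compute_resources" and "refresh_cache-<instance uuid>") come from oslo.concurrency's lockutils wrappers around named semaphores. A minimal sketch of the same pattern outside Nova, with placeholder bodies, assuming only the oslo.concurrency package:

# Minimal sketch (not Nova source): named-semaphore locking with oslo.concurrency,
# the mechanism behind the acquired/waited/held lock lines in this log.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # resource-tracker style critical section; body is a placeholder
    pass

def refresh_network_cache(instance_uuid):
    # same mechanism as the "refresh_cache-<uuid>" locks, used as a context manager
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance's network info cache here

update_usage()
refresh_network_cache('4438e230-0589-48ae-8848-d1f8414efa61')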
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.738445] env[63297]: DEBUG oslo_vmware.api [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Task: {'id': task-1696948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231281} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.738693] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1222.739187] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1222.739187] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1222.739322] env[63297]: INFO nova.compute.manager [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1222.739705] env[63297]: DEBUG oslo.service.loopingcall [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1222.739943] env[63297]: DEBUG nova.compute.manager [-] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1222.740307] env[63297]: DEBUG nova.network.neutron [-] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1222.760048] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e54bbfaa-637f-4159-80fd-e5f6821fd626 tempest-ImagesNegativeTestJSON-2109446866 tempest-ImagesNegativeTestJSON-2109446866-project-member] Lock "8adfd26f-1012-4e52-9371-e9d3f654046c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.701s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.892242] env[63297]: DEBUG nova.network.neutron [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1222.930998] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.997399] env[63297]: INFO nova.compute.manager [-] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Took 1.96 seconds to deallocate network for instance. [ 1223.095825] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696946, 'name': ReconfigVM_Task, 'duration_secs': 0.544922} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.095825] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 4438e230-0589-48ae-8848-d1f8414efa61/4438e230-0589-48ae-8848-d1f8414efa61.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1223.105034] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36bf6e5a-233b-4b58-b091-913e0a131bdf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.118471] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1223.118471] env[63297]: value = "task-1696949" [ 1223.118471] env[63297]: _type = "Task" [ 1223.118471] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.130976] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696949, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.225835] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4081245-c535-4152-bfc0-2cff90b7ff88 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.243616] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb700a1-a35a-4ef6-ab3b-049605e01892 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.281393] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26573c2e-ecbe-4ecd-8cc8-217fff94410d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.290965] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0baa192-1d40-44ea-ab7a-854511b9b2ed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.305663] env[63297]: DEBUG nova.compute.provider_tree [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1223.329532] env[63297]: DEBUG nova.compute.manager [req-a1b05bbf-35fd-490f-9cd9-67a462308310 req-27d35696-adc7-4637-9ba5-57ece22bf17f service nova] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Received event network-vif-deleted-c914211d-555e-4b13-b990-64105599d395 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1223.352569] env[63297]: DEBUG nova.network.neutron [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Updating instance_info_cache with network_info: [{"id": "e756a23f-dc86-4ac9-b42f-47196093abd3", "address": "fa:16:3e:95:10:b4", "network": {"id": "d05b1eab-223e-4ac8-a5e8-c795e6864f9a", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1095197402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2a338e9883940f99ab4136eb4d102ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tape756a23f-dc", "ovs_interfaceid": "e756a23f-dc86-4ac9-b42f-47196093abd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.508256] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.528255] env[63297]: DEBUG nova.network.neutron [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Updated VIF entry in instance network info cache for port e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1223.532204] env[63297]: DEBUG nova.network.neutron [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Updating instance_info_cache with network_info: [{"id": "e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c", "address": "fa:16:3e:ff:06:03", "network": {"id": "bfd15893-9bb1-46a3-bf31-db474ed0269a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1591634149-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734a95312d7d4da38201457d4f542a9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1fefdf1-1c", "ovs_interfaceid": "e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.566013] env[63297]: DEBUG nova.compute.manager [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Received event network-vif-plugged-e756a23f-dc86-4ac9-b42f-47196093abd3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1223.566238] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] Acquiring lock "746742ac-8d7a-466b-8bc0-043cb5422111-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.566438] env[63297]: DEBUG 
oslo_concurrency.lockutils [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] Lock "746742ac-8d7a-466b-8bc0-043cb5422111-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.566695] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] Lock "746742ac-8d7a-466b-8bc0-043cb5422111-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.568091] env[63297]: DEBUG nova.compute.manager [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] No waiting events found dispatching network-vif-plugged-e756a23f-dc86-4ac9-b42f-47196093abd3 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1223.568091] env[63297]: WARNING nova.compute.manager [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Received unexpected event network-vif-plugged-e756a23f-dc86-4ac9-b42f-47196093abd3 for instance with vm_state building and task_state spawning. [ 1223.568091] env[63297]: DEBUG nova.compute.manager [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Received event network-changed-e756a23f-dc86-4ac9-b42f-47196093abd3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1223.568091] env[63297]: DEBUG nova.compute.manager [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Refreshing instance network info cache due to event network-changed-e756a23f-dc86-4ac9-b42f-47196093abd3. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1223.568091] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] Acquiring lock "refresh_cache-746742ac-8d7a-466b-8bc0-043cb5422111" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1223.634084] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696949, 'name': Rename_Task, 'duration_secs': 0.145063} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.634446] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1223.634716] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa4733c8-1e2b-4438-9662-e0dfc78ef4a9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.642834] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1223.642834] env[63297]: value = "task-1696950" [ 1223.642834] env[63297]: _type = "Task" [ 1223.642834] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.651432] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696950, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.809575] env[63297]: DEBUG nova.scheduler.client.report [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1223.855642] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Releasing lock "refresh_cache-746742ac-8d7a-466b-8bc0-043cb5422111" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1223.855975] env[63297]: DEBUG nova.compute.manager [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Instance network_info: |[{"id": "e756a23f-dc86-4ac9-b42f-47196093abd3", "address": "fa:16:3e:95:10:b4", "network": {"id": "d05b1eab-223e-4ac8-a5e8-c795e6864f9a", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1095197402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "a2a338e9883940f99ab4136eb4d102ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape756a23f-dc", "ovs_interfaceid": "e756a23f-dc86-4ac9-b42f-47196093abd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1223.856288] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] Acquired lock "refresh_cache-746742ac-8d7a-466b-8bc0-043cb5422111" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.856464] env[63297]: DEBUG nova.network.neutron [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Refreshing network info cache for port e756a23f-dc86-4ac9-b42f-47196093abd3 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1223.857661] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:10:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e756a23f-dc86-4ac9-b42f-47196093abd3', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1223.868361] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Creating folder: Project (a2a338e9883940f99ab4136eb4d102ad). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1223.876967] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5065c090-6acb-4ec3-85d5-88d8ba11d97e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.884071] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Acquiring lock "22a927ad-c2af-4814-b728-ec31b76a34d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.884343] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Lock "22a927ad-c2af-4814-b728-ec31b76a34d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.895381] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Created folder: Project (a2a338e9883940f99ab4136eb4d102ad) in parent group-v353718. [ 1223.895381] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Creating folder: Instances. Parent ref: group-v353771. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1223.895381] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7bc01593-6380-4081-9386-36d0bf0737ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.905873] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Created folder: Instances in parent group-v353771. [ 1223.906158] env[63297]: DEBUG oslo.service.loopingcall [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1223.906344] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1223.906548] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20686178-b07d-449b-bf39-f5b22123a4fa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.936116] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1223.936116] env[63297]: value = "task-1696953" [ 1223.936116] env[63297]: _type = "Task" [ 1223.936116] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.944589] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696953, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.038193] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] Releasing lock "refresh_cache-b5d34058-fa3e-4806-97e5-638bbbffaeb8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1224.038193] env[63297]: DEBUG nova.compute.manager [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Received event network-vif-deleted-01a6609e-6d94-400f-8f01-fc67889fb600 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1224.038193] env[63297]: DEBUG nova.compute.manager [req-0f248c33-8b3e-425e-8486-4517c684648a req-f60b2d46-7e2a-4ef2-af05-5f46f2e03304 service nova] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Received event network-vif-deleted-f6b65dc1-9bc4-4fbd-8f16-7e9393f040b6 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1224.153379] env[63297]: DEBUG oslo_vmware.api [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696950, 'name': PowerOnVM_Task, 'duration_secs': 0.454837} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.157311] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1224.157311] env[63297]: INFO nova.compute.manager [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Took 12.91 seconds to spawn the instance on the hypervisor. 
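The PowerOnVM_Task lines just above show oslo.vmware's task helper at work: a vCenter task is kicked off, then wait_for_task polls it (the "progress is N% ... completed successfully" lines) until it reaches a terminal state. A rough sketch of that call pattern outside Nova; the host, credentials and vm_ref below are placeholders, and a reachable vCenter is assumed:

# Rough sketch (placeholder credentials and VM reference; needs a live vCenter):
# the invoke-then-wait pattern behind the task polling lines in this log.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',        # placeholders
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = ...  # a VirtualMachine managed object reference looked up elsewhere
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)  # polls the task; raises if it ends in an error state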
[ 1224.157311] env[63297]: DEBUG nova.compute.manager [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1224.157311] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1ed262-c957-47e2-b58e-f022213b3be8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.318399] env[63297]: DEBUG oslo_concurrency.lockutils [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.097s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.320885] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.428s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.321210] env[63297]: DEBUG nova.objects.instance [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lazy-loading 'resources' on Instance uuid cce038d4-dc9a-4fae-8348-1c2f674b79e3 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1224.384147] env[63297]: INFO nova.scheduler.client.report [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Deleted allocations for instance 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a [ 1224.454489] env[63297]: INFO nova.compute.manager [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Rebuilding instance [ 1224.457026] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696953, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.495112] env[63297]: DEBUG nova.network.neutron [-] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.532151] env[63297]: DEBUG nova.compute.manager [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1224.534159] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50157ead-0413-4138-afaa-17b27eaf87a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.561780] env[63297]: DEBUG nova.network.neutron [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Updated VIF entry in instance network info cache for port e756a23f-dc86-4ac9-b42f-47196093abd3. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1224.562231] env[63297]: DEBUG nova.network.neutron [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Updating instance_info_cache with network_info: [{"id": "e756a23f-dc86-4ac9-b42f-47196093abd3", "address": "fa:16:3e:95:10:b4", "network": {"id": "d05b1eab-223e-4ac8-a5e8-c795e6864f9a", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1095197402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2a338e9883940f99ab4136eb4d102ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape756a23f-dc", "ovs_interfaceid": "e756a23f-dc86-4ac9-b42f-47196093abd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.680560] env[63297]: INFO nova.compute.manager [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Took 29.65 seconds to build instance. 
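The inventory payload logged earlier for provider 88960333-a089-4255-ad72-5c02d57b2b35 encodes per-resource-class capacity; the usable amount placement works with is, in the usual formulation, (total - reserved) * allocation_ratio. A quick check of the logged numbers:

# Quick check of the inventory dict reported above for this compute node:
# usable capacity per resource class = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, int(usable))   # VCPU 192, MEMORY_MB 196078, DISK_GB 400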
[ 1224.899656] env[63297]: DEBUG oslo_concurrency.lockutils [None req-56c32d65-c6fc-4634-bb8f-24bb2fa4b62b tempest-ServerDiagnosticsV248Test-1481871939 tempest-ServerDiagnosticsV248Test-1481871939-project-member] Lock "7f8849fb-c5d6-47a1-8079-08dfb2e0b85a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.043s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.950394] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696953, 'name': CreateVM_Task, 'duration_secs': 0.542671} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.952978] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1224.954238] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1224.954335] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.958071] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1224.958071] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6eb3547-1fdd-435e-87f5-6e5151b8ad10 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.961174] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1224.961174] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524bd7ee-9da6-5435-78db-e855f9749470" [ 1224.961174] env[63297]: _type = "Task" [ 1224.961174] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.974216] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524bd7ee-9da6-5435-78db-e855f9749470, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.985689] env[63297]: DEBUG nova.network.neutron [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Successfully updated port: ca5923c7-8c5e-4d13-8abd-34373c2d8dd5 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1225.001029] env[63297]: INFO nova.compute.manager [-] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Took 2.26 seconds to deallocate network for instance. [ 1225.055438] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1225.056656] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c89a3cc-3ecf-4110-b18f-6288a06f467b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.064395] env[63297]: DEBUG oslo_concurrency.lockutils [req-0f2478aa-5ff8-4c97-ad2d-6dab043aa78c req-9bdd034f-89da-441b-b0ce-a4a669a5b2d9 service nova] Releasing lock "refresh_cache-746742ac-8d7a-466b-8bc0-043cb5422111" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1225.066017] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Waiting for the task: (returnval){ [ 1225.066017] env[63297]: value = "task-1696954" [ 1225.066017] env[63297]: _type = "Task" [ 1225.066017] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.077580] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696954, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.184777] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9ba91bd-725e-4857-b268-e6d20aa01243 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "4438e230-0589-48ae-8848-d1f8414efa61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.172s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.325148] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4b85b4-590a-49dc-b361-a7641121b6be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.336101] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3407025-0377-41c1-b04f-54f4382c403b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.379973] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d1ff07-e404-42d5-bde3-3bf526f13303 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.390033] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23016ec2-38b6-495d-bf64-ab00d21e2d6b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.405907] env[63297]: DEBUG nova.compute.provider_tree [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1225.474848] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524bd7ee-9da6-5435-78db-e855f9749470, 'name': SearchDatastore_Task, 'duration_secs': 0.035729} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.478190] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1225.478430] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1225.478660] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1225.478800] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.478995] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1225.479628] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-856879b0-5540-4a37-94fb-17a4189480f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.488650] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1225.488833] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Folder [datastore1] devstack-image-cache_base created. 
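The image-cache entries being locked and searched above ("[datastore1] devstack-image-cache_base/<image id>/<image id>.vmdk") are ordinary datastore paths. A small sketch of building and parsing such a path, assuming oslo.vmware's DatastorePath helper (the class Nova's ds_util layer relies on):

# Sketch: construct/parse "[datastore1] devstack-image-cache_base/..." style paths
# like the ones locked during _fetch_image_if_missing above.
from oslo_vmware.objects import datastore as ds_obj

image_id = '41f1ad71-37f2-4e86-a900-da4965eba44f'
path = ds_obj.DatastorePath('datastore1', 'devstack-image-cache_base',
                            image_id, '%s.vmdk' % image_id)
print(str(path))   # [datastore1] devstack-image-cache_base/<image id>/<image id>.vmdk

parsed = ds_obj.DatastorePath.parse(str(path))
print(parsed.datastore, parsed.rel_path)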
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1225.489862] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31951576-a612-49a3-a309-a4f86f7c5847 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.496085] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "refresh_cache-eff06e8a-8341-4d5e-b6dd-a585be4a21ea" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1225.496223] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquired lock "refresh_cache-eff06e8a-8341-4d5e-b6dd-a585be4a21ea" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.496366] env[63297]: DEBUG nova.network.neutron [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1225.502705] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1225.502705] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52335393-691c-579a-74fa-24b60be8b239" [ 1225.502705] env[63297]: _type = "Task" [ 1225.502705] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.511971] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52335393-691c-579a-74fa-24b60be8b239, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.514849] env[63297]: DEBUG nova.compute.manager [req-d10c35c0-7b4a-4a00-b2f1-7dd72151af62 req-e1009ae6-a0e3-4073-a548-b7ba6ba4102b service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Received event network-changed-8cf1041d-9ff1-4cf4-808d-40d2edaf0e06 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1225.514996] env[63297]: DEBUG nova.compute.manager [req-d10c35c0-7b4a-4a00-b2f1-7dd72151af62 req-e1009ae6-a0e3-4073-a548-b7ba6ba4102b service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Refreshing instance network info cache due to event network-changed-8cf1041d-9ff1-4cf4-808d-40d2edaf0e06. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1225.515134] env[63297]: DEBUG oslo_concurrency.lockutils [req-d10c35c0-7b4a-4a00-b2f1-7dd72151af62 req-e1009ae6-a0e3-4073-a548-b7ba6ba4102b service nova] Acquiring lock "refresh_cache-35c68986-51b5-43ba-a076-aca3c86d68bc" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1225.515266] env[63297]: DEBUG oslo_concurrency.lockutils [req-d10c35c0-7b4a-4a00-b2f1-7dd72151af62 req-e1009ae6-a0e3-4073-a548-b7ba6ba4102b service nova] Acquired lock "refresh_cache-35c68986-51b5-43ba-a076-aca3c86d68bc" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.515441] env[63297]: DEBUG nova.network.neutron [req-d10c35c0-7b4a-4a00-b2f1-7dd72151af62 req-e1009ae6-a0e3-4073-a548-b7ba6ba4102b service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Refreshing network info cache for port 8cf1041d-9ff1-4cf4-808d-40d2edaf0e06 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1225.517612] env[63297]: DEBUG oslo_concurrency.lockutils [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.576657] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696954, 'name': PowerOffVM_Task, 'duration_secs': 0.233986} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.580062] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1225.580062] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1225.580062] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b221cce4-7de9-4a16-9821-694c58961817 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.589099] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1225.589376] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-113aa924-d89b-4ad2-96b5-8c3d480dc3ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.614779] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1225.615017] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1225.615204] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Deleting the datastore file [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1225.615464] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63647825-1372-48d0-8ec0-fbadba78c928 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.623670] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Waiting for the task: (returnval){ [ 1225.623670] env[63297]: value = "task-1696956" [ 1225.623670] env[63297]: _type = "Task" [ 1225.623670] env[63297]: } to complete. 
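The rebuild above tears the old VM down in three steps that are visible in sequence: power off (a vCenter task), UnregisterVM (a plain, non-task call), then a DeleteDatastoreFile_Task over the instance directory. A hedged sketch of that sequence against a generic oslo.vmware session; session, vm_ref, dc_ref and ds_path are placeholders obtained elsewhere, and this is not Nova's vmops code verbatim:

# Hedged sketch of the power-off / unregister / delete-files teardown seen above.
def destroy_vm(session, vm_ref, dc_ref, ds_path):
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)                               # "Powered off the VM"

    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)   # "Unregistered the VM"

    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)                               # "Deleted the datastore file"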
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.634909] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696956, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.687957] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1225.909546] env[63297]: DEBUG nova.scheduler.client.report [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1225.972204] env[63297]: DEBUG nova.compute.manager [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Received event network-vif-deleted-20697777-dceb-47b1-8edc-c6f3abc08f0f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1225.972830] env[63297]: DEBUG nova.compute.manager [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Received event network-vif-plugged-ca5923c7-8c5e-4d13-8abd-34373c2d8dd5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1225.974736] env[63297]: DEBUG oslo_concurrency.lockutils [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] Acquiring lock "eff06e8a-8341-4d5e-b6dd-a585be4a21ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.975161] env[63297]: DEBUG oslo_concurrency.lockutils [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] Lock "eff06e8a-8341-4d5e-b6dd-a585be4a21ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.975514] env[63297]: DEBUG oslo_concurrency.lockutils [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] Lock "eff06e8a-8341-4d5e-b6dd-a585be4a21ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.978024] env[63297]: DEBUG 
nova.compute.manager [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] No waiting events found dispatching network-vif-plugged-ca5923c7-8c5e-4d13-8abd-34373c2d8dd5 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1225.978024] env[63297]: WARNING nova.compute.manager [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Received unexpected event network-vif-plugged-ca5923c7-8c5e-4d13-8abd-34373c2d8dd5 for instance with vm_state building and task_state spawning. [ 1225.978024] env[63297]: DEBUG nova.compute.manager [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Received event network-changed-ca5923c7-8c5e-4d13-8abd-34373c2d8dd5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1225.978024] env[63297]: DEBUG nova.compute.manager [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Refreshing instance network info cache due to event network-changed-ca5923c7-8c5e-4d13-8abd-34373c2d8dd5. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1225.978024] env[63297]: DEBUG oslo_concurrency.lockutils [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] Acquiring lock "refresh_cache-eff06e8a-8341-4d5e-b6dd-a585be4a21ea" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1226.016747] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52335393-691c-579a-74fa-24b60be8b239, 'name': SearchDatastore_Task, 'duration_secs': 0.010873} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.018287] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49cd4aec-4166-47cb-9c03-bbe1a4407871 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.026184] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1226.026184] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d38a1-972b-1b32-aa6b-5f19b8c29113" [ 1226.026184] env[63297]: _type = "Task" [ 1226.026184] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.038411] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d38a1-972b-1b32-aa6b-5f19b8c29113, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.140612] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696956, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098786} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.140612] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1226.140612] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1226.140612] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1226.159855] env[63297]: DEBUG nova.network.neutron [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1226.220162] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.423177] env[63297]: DEBUG nova.network.neutron [req-d10c35c0-7b4a-4a00-b2f1-7dd72151af62 req-e1009ae6-a0e3-4073-a548-b7ba6ba4102b service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Updated VIF entry in instance network info cache for port 8cf1041d-9ff1-4cf4-808d-40d2edaf0e06. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1226.423177] env[63297]: DEBUG nova.network.neutron [req-d10c35c0-7b4a-4a00-b2f1-7dd72151af62 req-e1009ae6-a0e3-4073-a548-b7ba6ba4102b service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Updating instance_info_cache with network_info: [{"id": "8cf1041d-9ff1-4cf4-808d-40d2edaf0e06", "address": "fa:16:3e:bc:60:fa", "network": {"id": "f7678fac-2ee8-4f1c-8731-678d3fa06e4d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-63536799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71764c2eb27d41208f88179e9c39c0bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cf1041d-9f", "ovs_interfaceid": "8cf1041d-9ff1-4cf4-808d-40d2edaf0e06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.425710] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.429922] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.616s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.431987] env[63297]: INFO nova.compute.claims [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1226.458536] env[63297]: INFO nova.scheduler.client.report [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Deleted allocations for instance cce038d4-dc9a-4fae-8348-1c2f674b79e3 [ 1226.542837] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d38a1-972b-1b32-aa6b-5f19b8c29113, 'name': SearchDatastore_Task, 'duration_secs': 0.046791} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.543196] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1226.543449] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 746742ac-8d7a-466b-8bc0-043cb5422111/746742ac-8d7a-466b-8bc0-043cb5422111.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1226.544326] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2951d531-1e53-497e-8cbb-775065d82275 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.553041] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1226.553041] env[63297]: value = "task-1696957" [ 1226.553041] env[63297]: _type = "Task" [ 1226.553041] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.564440] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696957, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.719223] env[63297]: DEBUG nova.network.neutron [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Updating instance_info_cache with network_info: [{"id": "ca5923c7-8c5e-4d13-8abd-34373c2d8dd5", "address": "fa:16:3e:77:14:4a", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.183", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca5923c7-8c", "ovs_interfaceid": "ca5923c7-8c5e-4d13-8abd-34373c2d8dd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.929772] env[63297]: DEBUG oslo_concurrency.lockutils [req-d10c35c0-7b4a-4a00-b2f1-7dd72151af62 req-e1009ae6-a0e3-4073-a548-b7ba6ba4102b service nova] Releasing lock "refresh_cache-35c68986-51b5-43ba-a076-aca3c86d68bc" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1226.979911] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9246ebf-3051-4e51-95d1-adec1730eafd tempest-ServerShowV257Test-356643135 tempest-ServerShowV257Test-356643135-project-member] Lock "cce038d4-dc9a-4fae-8348-1c2f674b79e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.055s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.065269] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696957, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.189875] env[63297]: DEBUG nova.virt.hardware [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1227.190124] env[63297]: DEBUG nova.virt.hardware [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1227.190328] env[63297]: DEBUG nova.virt.hardware [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1227.190519] env[63297]: DEBUG nova.virt.hardware [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1227.190662] env[63297]: DEBUG nova.virt.hardware [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1227.191258] env[63297]: DEBUG nova.virt.hardware [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1227.192771] env[63297]: DEBUG nova.virt.hardware [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1227.192771] env[63297]: DEBUG nova.virt.hardware [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1227.192771] env[63297]: DEBUG nova.virt.hardware [None 
req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1227.194752] env[63297]: DEBUG nova.virt.hardware [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1227.195053] env[63297]: DEBUG nova.virt.hardware [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1227.195871] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca199987-23f5-487a-9de1-97b1179f0341 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.208763] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83352cc0-518b-4823-9c6f-79d91556890f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.226508] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Releasing lock "refresh_cache-eff06e8a-8341-4d5e-b6dd-a585be4a21ea" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1227.226785] env[63297]: DEBUG nova.compute.manager [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Instance network_info: |[{"id": "ca5923c7-8c5e-4d13-8abd-34373c2d8dd5", "address": "fa:16:3e:77:14:4a", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.183", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca5923c7-8c", "ovs_interfaceid": "ca5923c7-8c5e-4d13-8abd-34373c2d8dd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1227.227915] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 
tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Instance VIF info [] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1227.233443] env[63297]: DEBUG oslo.service.loopingcall [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1227.233508] env[63297]: DEBUG oslo_concurrency.lockutils [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] Acquired lock "refresh_cache-eff06e8a-8341-4d5e-b6dd-a585be4a21ea" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.233630] env[63297]: DEBUG nova.network.neutron [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Refreshing network info cache for port ca5923c7-8c5e-4d13-8abd-34373c2d8dd5 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1227.236708] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:14:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca5923c7-8c5e-4d13-8abd-34373c2d8dd5', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1227.242571] env[63297]: DEBUG oslo.service.loopingcall [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1227.243096] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1227.243306] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1227.243503] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0d4f05b-a567-4a90-83f4-629d8f857981 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.256603] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42614b3c-42f0-46e9-b88d-fb4a6b141368 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.288020] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1227.288020] env[63297]: value = "task-1696958" [ 1227.288020] env[63297]: _type = "Task" [ 1227.288020] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.288020] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1227.288020] env[63297]: value = "task-1696959" [ 1227.288020] env[63297]: _type = "Task" [ 1227.288020] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.316927] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696958, 'name': CreateVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.316927] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696959, 'name': CreateVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.584573] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696957, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552595} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.588469] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 746742ac-8d7a-466b-8bc0-043cb5422111/746742ac-8d7a-466b-8bc0-043cb5422111.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1227.588635] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1227.589251] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4a30e22b-b09b-4e92-834a-27976474187d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.596719] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1227.596719] env[63297]: value = "task-1696960" [ 1227.596719] env[63297]: _type = "Task" [ 1227.596719] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.607656] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696960, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.667064] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.800922] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696958, 'name': CreateVM_Task, 'duration_secs': 0.43441} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.806307] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1227.806946] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696959, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.810253] env[63297]: DEBUG oslo_concurrency.lockutils [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1227.810253] env[63297]: DEBUG oslo_concurrency.lockutils [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.810429] env[63297]: DEBUG oslo_concurrency.lockutils [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1227.811671] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a87026e6-7722-41a6-bfc0-e4ad97063457 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.815585] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Waiting for the task: (returnval){ [ 1227.815585] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a53e96-bdaf-57a0-b2a3-127929ed0190" [ 1227.815585] env[63297]: _type = "Task" [ 1227.815585] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.824014] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a53e96-bdaf-57a0-b2a3-127929ed0190, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.928419] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b2432e-2dba-4785-a264-b5c3b14a6b55 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.936343] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5358cd-061c-4a36-9e43-a0ae689f13cd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.969696] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8babb46-26ab-4509-ad40-87f9e21ee242 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.976467] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338fd278-b0a3-4f73-86a7-3858651a6c9a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.990188] env[63297]: DEBUG nova.compute.provider_tree [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.108345] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696960, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079297} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.109310] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1228.109987] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873c3cf0-328f-4025-9331-7b8858b4c64e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.138528] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 746742ac-8d7a-466b-8bc0-043cb5422111/746742ac-8d7a-466b-8bc0-043cb5422111.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1228.139309] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-524a4906-f3c0-4c70-a175-42c61ea7da5e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.161461] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1228.161461] env[63297]: value = "task-1696961" [ 1228.161461] env[63297]: _type = "Task" [ 1228.161461] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.170399] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696961, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.303071] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696959, 'name': CreateVM_Task, 'duration_secs': 0.757585} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.303071] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1228.303071] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1228.332706] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a53e96-bdaf-57a0-b2a3-127929ed0190, 'name': SearchDatastore_Task, 'duration_secs': 0.011985} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.335614] env[63297]: DEBUG oslo_concurrency.lockutils [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1228.336478] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1228.336478] env[63297]: DEBUG oslo_concurrency.lockutils [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1228.336478] env[63297]: DEBUG oslo_concurrency.lockutils [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.336478] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1228.338932] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.338932] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1228.338932] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-800565c3-7d52-45f2-b129-4929fe4461c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.340141] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf126a8b-7603-46a4-b170-ebaed0e0cfe2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.346138] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1228.346138] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52da4f1e-982a-305c-9432-a2d77ed04b89" [ 1228.346138] env[63297]: _type = "Task" [ 1228.346138] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.349760] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1228.349944] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1228.351487] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b6924c2-46e0-4208-9f55-938cfd1b0ee2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.357376] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52da4f1e-982a-305c-9432-a2d77ed04b89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.361241] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Waiting for the task: (returnval){ [ 1228.361241] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d2abf-94c8-76a1-d58e-9fa2d3aceb83" [ 1228.361241] env[63297]: _type = "Task" [ 1228.361241] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.369242] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d2abf-94c8-76a1-d58e-9fa2d3aceb83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.473930] env[63297]: DEBUG nova.network.neutron [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Updated VIF entry in instance network info cache for port ca5923c7-8c5e-4d13-8abd-34373c2d8dd5. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1228.474337] env[63297]: DEBUG nova.network.neutron [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Updating instance_info_cache with network_info: [{"id": "ca5923c7-8c5e-4d13-8abd-34373c2d8dd5", "address": "fa:16:3e:77:14:4a", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.183", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca5923c7-8c", "ovs_interfaceid": "ca5923c7-8c5e-4d13-8abd-34373c2d8dd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.494435] env[63297]: DEBUG nova.scheduler.client.report [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1228.672265] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696961, 'name': ReconfigVM_Task, 'duration_secs': 0.405284} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.672578] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 746742ac-8d7a-466b-8bc0-043cb5422111/746742ac-8d7a-466b-8bc0-043cb5422111.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1228.673220] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ce376e7-01fb-491f-8237-e71be7006c94 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.680232] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1228.680232] env[63297]: value = "task-1696962" [ 1228.680232] env[63297]: _type = "Task" [ 1228.680232] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.688436] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696962, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.859033] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52da4f1e-982a-305c-9432-a2d77ed04b89, 'name': SearchDatastore_Task, 'duration_secs': 0.021818} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.859033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1228.859033] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1228.859033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1228.861791] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Acquiring lock "862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.862373] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Lock "862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.871629] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d2abf-94c8-76a1-d58e-9fa2d3aceb83, 'name': SearchDatastore_Task, 'duration_secs': 0.011295} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.872502] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06a4913a-6a66-4c37-b9a7-0da20513e635 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.878065] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Waiting for the task: (returnval){ [ 1228.878065] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5219e1b0-6bfd-f7b2-18b8-c42517c53a1e" [ 1228.878065] env[63297]: _type = "Task" [ 1228.878065] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.886324] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5219e1b0-6bfd-f7b2-18b8-c42517c53a1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.977659] env[63297]: DEBUG oslo_concurrency.lockutils [req-3a9d0488-db6c-40d3-9230-896682259130 req-b134bafc-a9c9-46d5-ad5e-c662137b342a service nova] Releasing lock "refresh_cache-eff06e8a-8341-4d5e-b6dd-a585be4a21ea" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1229.002060] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.571s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.002060] env[63297]: DEBUG nova.compute.manager [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1229.006247] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.642s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.008201] env[63297]: INFO nova.compute.claims [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1229.194656] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696962, 'name': Rename_Task, 'duration_secs': 0.142529} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.194962] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1229.195273] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4fe01e9c-8b1e-4fef-98cf-c5c1f4229ae9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.203078] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1229.203078] env[63297]: value = "task-1696963" [ 1229.203078] env[63297]: _type = "Task" [ 1229.203078] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.214938] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696963, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.394098] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5219e1b0-6bfd-f7b2-18b8-c42517c53a1e, 'name': SearchDatastore_Task, 'duration_secs': 0.010231} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.394098] env[63297]: DEBUG oslo_concurrency.lockutils [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1229.394098] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb/13706c85-c23e-47cd-a7d8-2e902c11a7fb.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1229.394098] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.394098] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1229.394098] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd2de395-a91c-4a00-b134-f4254905c1d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.394712] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-074380b7-1b4c-4b94-b2b9-ec2f4530c9e4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.402504] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Waiting for the task: (returnval){ [ 1229.402504] env[63297]: value = "task-1696964" [ 1229.402504] env[63297]: _type = "Task" [ 1229.402504] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.406691] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1229.406870] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1229.407651] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dcbbe02-a189-4f55-8f26-569263cf749a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.416671] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1229.416671] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52221e8e-1307-f2c4-8a2b-845cda53c18f" [ 1229.416671] env[63297]: _type = "Task" [ 1229.416671] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.417186] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696964, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.427750] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52221e8e-1307-f2c4-8a2b-845cda53c18f, 'name': SearchDatastore_Task, 'duration_secs': 0.010438} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.429910] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94a5f2ca-52cf-4991-a3b1-16ff8631f31f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.435032] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1229.435032] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e4f02b-98b4-6d1a-fa9d-80fa575dccd4" [ 1229.435032] env[63297]: _type = "Task" [ 1229.435032] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.444741] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e4f02b-98b4-6d1a-fa9d-80fa575dccd4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.515466] env[63297]: DEBUG nova.compute.utils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1229.522887] env[63297]: DEBUG nova.compute.manager [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1229.523047] env[63297]: DEBUG nova.network.neutron [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1229.719884] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696963, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.723692] env[63297]: DEBUG nova.policy [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7597fe026ce45c9bda230ff1ef8a628', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2dd93edf6aee452280b8b7c441b75d38', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1229.919928] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696964, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.945316] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e4f02b-98b4-6d1a-fa9d-80fa575dccd4, 'name': SearchDatastore_Task, 'duration_secs': 0.009354} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.945588] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1229.946385] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] eff06e8a-8341-4d5e-b6dd-a585be4a21ea/eff06e8a-8341-4d5e-b6dd-a585be4a21ea.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1229.946385] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9c3cf12-8cb9-43b1-9041-63b7bb4724c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.953048] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1229.953048] env[63297]: value = "task-1696965" [ 1229.953048] env[63297]: _type = "Task" [ 1229.953048] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.961730] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696965, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.023619] env[63297]: DEBUG nova.compute.manager [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1230.224793] env[63297]: DEBUG oslo_vmware.api [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696963, 'name': PowerOnVM_Task, 'duration_secs': 1.000438} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.224977] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1230.225247] env[63297]: INFO nova.compute.manager [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Took 10.81 seconds to spawn the instance on the hypervisor. [ 1230.225798] env[63297]: DEBUG nova.compute.manager [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1230.226616] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d48f0c6-e64b-465d-9c3e-2536c0ddad11 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.419273] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696964, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526806} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.419273] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb/13706c85-c23e-47cd-a7d8-2e902c11a7fb.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1230.419273] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1230.419273] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-43be5efd-a832-48c7-a579-7d9e77657bc5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.426363] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Waiting for the task: (returnval){ [ 1230.426363] env[63297]: value = "task-1696966" [ 1230.426363] env[63297]: _type = "Task" [ 1230.426363] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.438553] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696966, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.463954] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696965, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.440209} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.464465] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] eff06e8a-8341-4d5e-b6dd-a585be4a21ea/eff06e8a-8341-4d5e-b6dd-a585be4a21ea.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1230.464674] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1230.464793] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ffc32c2-5729-4365-a80a-1a20dc5560ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.473556] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1230.473556] env[63297]: value = "task-1696967" [ 1230.473556] env[63297]: _type = "Task" [ 1230.473556] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.485439] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696967, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.512213] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4816c4be-c35c-4a6d-ba69-0d697ca32e5d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.520430] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7012b039-5f18-4638-9960-65adda86c622 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.562514] env[63297]: DEBUG nova.network.neutron [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Successfully created port: 07cc9161-e617-439f-97b5-70331464cd31 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1230.565291] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8065d861-b9b3-4751-b5bd-58b65dbb9a4a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.574251] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9293de-298c-4e96-98ad-6da88cb8ce5a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.589837] env[63297]: DEBUG nova.compute.provider_tree [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.755708] env[63297]: INFO nova.compute.manager [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Took 32.38 seconds to build instance. [ 1230.938973] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696966, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07158} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.938973] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1230.939394] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da6c173-4cf1-4860-a2f6-ed4fbb59b3e4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.962032] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb/13706c85-c23e-47cd-a7d8-2e902c11a7fb.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1230.962390] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-337671f0-43dc-4c30-a402-4d03921bf3b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.989023] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696967, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066103} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.989023] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1230.989023] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Waiting for the task: (returnval){ [ 1230.989023] env[63297]: value = "task-1696968" [ 1230.989023] env[63297]: _type = "Task" [ 1230.989023] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.989493] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b95c93-15ea-40ad-8e18-3a100409b5f5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.001196] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696968, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.022782] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] eff06e8a-8341-4d5e-b6dd-a585be4a21ea/eff06e8a-8341-4d5e-b6dd-a585be4a21ea.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1231.022782] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3955ee0-1930-46b3-9b82-180a683ee38f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.044484] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1231.044484] env[63297]: value = "task-1696969" [ 1231.044484] env[63297]: _type = "Task" [ 1231.044484] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.055961] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696969, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.070608] env[63297]: DEBUG nova.compute.manager [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1231.096237] env[63297]: DEBUG nova.scheduler.client.report [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1231.107198] env[63297]: DEBUG nova.virt.hardware [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1231.107435] env[63297]: DEBUG nova.virt.hardware [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1231.107587] env[63297]: DEBUG nova.virt.hardware [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1231.107757] env[63297]: DEBUG nova.virt.hardware [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1231.107896] env[63297]: DEBUG nova.virt.hardware [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1231.108168] env[63297]: DEBUG nova.virt.hardware [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1231.108559] env[63297]: DEBUG nova.virt.hardware [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1231.108636] env[63297]: DEBUG nova.virt.hardware [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1231.108835] env[63297]: DEBUG nova.virt.hardware [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1231.109044] env[63297]: DEBUG nova.virt.hardware [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1231.109305] env[63297]: DEBUG nova.virt.hardware [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1231.110994] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ed25f6-f215-41cb-a5b8-b2515c6fe912 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.121589] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0175ba39-c869-43fc-8488-fa6632d66297 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.179649] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "b65e8c04-df55-491e-861c-8aa6def8c9be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.179881] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "b65e8c04-df55-491e-861c-8aa6def8c9be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.260647] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9426399-1bb1-462f-ad97-889de3e31713 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lock 
"746742ac-8d7a-466b-8bc0-043cb5422111" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.840s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.502527] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696968, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.555663] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696969, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.600916] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.601528] env[63297]: DEBUG nova.compute.manager [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1231.604569] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.373s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.606280] env[63297]: INFO nova.compute.claims [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1231.661705] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.763768] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1232.004668] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696968, 'name': ReconfigVM_Task, 'duration_secs': 0.897424} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.005337] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb/13706c85-c23e-47cd-a7d8-2e902c11a7fb.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1232.006261] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-daa9dcae-e6cc-4a93-ae35-4031223fb0b3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.014541] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Waiting for the task: (returnval){ [ 1232.014541] env[63297]: value = "task-1696970" [ 1232.014541] env[63297]: _type = "Task" [ 1232.014541] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.026665] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696970, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.055612] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696969, 'name': ReconfigVM_Task, 'duration_secs': 0.87716} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.055912] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Reconfigured VM instance instance-00000012 to attach disk [datastore1] eff06e8a-8341-4d5e-b6dd-a585be4a21ea/eff06e8a-8341-4d5e-b6dd-a585be4a21ea.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1232.056610] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21e627a7-0a76-4c64-8db9-e4745ba05dbf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.063624] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1232.063624] env[63297]: value = "task-1696971" [ 1232.063624] env[63297]: _type = "Task" [ 1232.063624] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.072529] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696971, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.111957] env[63297]: DEBUG nova.compute.utils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1232.115028] env[63297]: DEBUG nova.compute.manager [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1232.117017] env[63297]: DEBUG nova.network.neutron [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1232.163569] env[63297]: DEBUG nova.policy [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1d8414bb5b0b41e2b654e67de8001f53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e7e471b2638428080e76e0d2b740da4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1232.171112] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.297776] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.526844] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696970, 'name': Rename_Task, 'duration_secs': 0.1642} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.527962] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1232.527962] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3416e19-288c-44e9-9967-0cc869ac8d8b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.537736] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Waiting for the task: (returnval){ [ 1232.537736] env[63297]: value = "task-1696972" [ 1232.537736] env[63297]: _type = "Task" [ 1232.537736] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.554758] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696972, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.576136] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696971, 'name': Rename_Task, 'duration_secs': 0.210458} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.576564] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1232.576726] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3bbf57f2-957f-470d-807d-459722493b58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.583319] env[63297]: DEBUG nova.network.neutron [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Successfully created port: ea7a3748-62ea-4bc1-b3ba-0da9fab212c3 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1232.586718] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1232.586718] env[63297]: value = "task-1696973" [ 1232.586718] env[63297]: _type = "Task" [ 1232.586718] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.595269] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696973, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.616142] env[63297]: DEBUG nova.compute.manager [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1232.668460] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.668460] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.668460] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1232.668460] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.817046] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.817046] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.049358] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696972, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.087211] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3ca00f-d9a4-4e54-98bd-973617de6ff6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.105822] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696973, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.106825] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a892f7-fb5c-41f1-b6f4-3a2ad223ed99 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.142890] env[63297]: DEBUG nova.network.neutron [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Successfully updated port: 07cc9161-e617-439f-97b5-70331464cd31 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1233.148073] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20986fc4-2118-47a7-89fa-86a6ecef66dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.157706] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6426f057-3223-43b3-bfc5-44296da33390 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.175862] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.176437] env[63297]: DEBUG nova.compute.provider_tree [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1233.200963] env[63297]: DEBUG nova.compute.manager [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1233.201890] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5867b4b7-e49d-442c-b071-fcd21c39f753 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.386920] env[63297]: DEBUG nova.compute.manager [req-805a1605-4f48-4875-953a-7700cb757fb8 req-b5f259ae-b67e-41c3-a63a-1e22b91e85ae service nova] [instance: 459d5a17-182b-4284-b464-57d342981031] Received event network-vif-plugged-07cc9161-e617-439f-97b5-70331464cd31 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1233.387141] env[63297]: DEBUG oslo_concurrency.lockutils [req-805a1605-4f48-4875-953a-7700cb757fb8 req-b5f259ae-b67e-41c3-a63a-1e22b91e85ae service nova] Acquiring lock "459d5a17-182b-4284-b464-57d342981031-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.388411] env[63297]: DEBUG oslo_concurrency.lockutils [req-805a1605-4f48-4875-953a-7700cb757fb8 req-b5f259ae-b67e-41c3-a63a-1e22b91e85ae service nova] Lock "459d5a17-182b-4284-b464-57d342981031-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.388411] env[63297]: DEBUG oslo_concurrency.lockutils [req-805a1605-4f48-4875-953a-7700cb757fb8 req-b5f259ae-b67e-41c3-a63a-1e22b91e85ae service nova] Lock "459d5a17-182b-4284-b464-57d342981031-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.388411] env[63297]: DEBUG nova.compute.manager [req-805a1605-4f48-4875-953a-7700cb757fb8 req-b5f259ae-b67e-41c3-a63a-1e22b91e85ae service nova] [instance: 459d5a17-182b-4284-b464-57d342981031] No waiting events found dispatching network-vif-plugged-07cc9161-e617-439f-97b5-70331464cd31 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1233.388411] env[63297]: WARNING nova.compute.manager [req-805a1605-4f48-4875-953a-7700cb757fb8 req-b5f259ae-b67e-41c3-a63a-1e22b91e85ae service nova] [instance: 459d5a17-182b-4284-b464-57d342981031] Received unexpected event network-vif-plugged-07cc9161-e617-439f-97b5-70331464cd31 for instance with vm_state building and task_state spawning. [ 1233.551650] env[63297]: DEBUG oslo_vmware.api [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Task: {'id': task-1696972, 'name': PowerOnVM_Task, 'duration_secs': 0.569107} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.552282] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1233.553141] env[63297]: DEBUG nova.compute.manager [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1233.554582] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb160fe7-0309-4566-a104-5b52121914ff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.600255] env[63297]: DEBUG oslo_vmware.api [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696973, 'name': PowerOnVM_Task, 'duration_secs': 0.713514} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.600598] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1233.600891] env[63297]: INFO nova.compute.manager [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Took 11.44 seconds to spawn the instance on the hypervisor. 
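Each spawn recorded above follows the same sequence of vCenter tasks: copy the cached image VMDK into the instance directory (CopyVirtualDisk_Task), extend the root disk (ExtendVirtualDisk_Task), attach it to the VM (ReconfigVM_Task), rename the VM (Rename_Task), and power it on (PowerOnVM_Task). Each step blocks on wait_for_task, whose poll loop produces the repeated "Waiting for the task" / "progress is N%" / "completed successfully" entries. Below is a minimal sketch of that poll-until-terminal pattern, for illustration only; TaskInfo and read_task_info are hypothetical stand-ins for the real property-collector read of a vCenter task, not oslo.vmware's actual API.

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        # vSphere task states: queued, running, success, error
        state: str
        progress: int = 0
        result: object = None
        error: str = ""

    def wait_for_task(read_task_info, task_id, poll_interval=0.5):
        """Poll a vCenter task until it reaches a terminal state.

        read_task_info is any callable returning a TaskInfo for task_id; in
        the real driver this would be a PropertyCollector read of the task's
        'info' property. Progress is logged while the task is queued/running,
        the result is returned on success, and an error state raises.
        """
        while True:
            info = read_task_info(task_id)
            if info.state in ("queued", "running"):
                print(f"Task: {task_id} progress is {info.progress}%")
                time.sleep(poll_interval)
                continue
            if info.state == "success":
                print(f"Task: {task_id} completed successfully")
                return info.result
            raise RuntimeError(f"Task {task_id} failed: {info.error}")

    # Example: a fake task that reports 0% -> 66% -> success, mirroring the
    # progress steps logged for task-1696963 above.
    if __name__ == "__main__":
        states = iter([TaskInfo("running", 0), TaskInfo("running", 66),
                       TaskInfo("success", 100, result="ok")])
        print(wait_for_task(lambda _tid: next(states), "task-1696963",
                            poll_interval=0.1))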
[ 1233.601094] env[63297]: DEBUG nova.compute.manager [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1233.601946] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6978f8b5-12f1-42d4-a673-37ad4bc8fad2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.652456] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Acquiring lock "refresh_cache-459d5a17-182b-4284-b464-57d342981031" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.652619] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Acquired lock "refresh_cache-459d5a17-182b-4284-b464-57d342981031" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.652751] env[63297]: DEBUG nova.network.neutron [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1233.655303] env[63297]: DEBUG nova.compute.manager [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1233.680650] env[63297]: DEBUG nova.scheduler.client.report [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1233.688410] env[63297]: DEBUG nova.virt.hardware [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1233.688810] env[63297]: DEBUG nova.virt.hardware [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1233.689113] env[63297]: DEBUG nova.virt.hardware [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1233.689416] env[63297]: DEBUG nova.virt.hardware [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1233.689858] env[63297]: DEBUG nova.virt.hardware [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1233.690289] env[63297]: DEBUG nova.virt.hardware [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1233.690663] env[63297]: DEBUG nova.virt.hardware [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1233.690934] env[63297]: DEBUG nova.virt.hardware [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1233.693019] env[63297]: DEBUG nova.virt.hardware [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1233.693019] env[63297]: DEBUG nova.virt.hardware [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1233.693019] env[63297]: DEBUG nova.virt.hardware [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1233.693019] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40555621-9a40-4f9d-89b3-a1754c2f834c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.701997] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792a051e-9d5b-4eb8-be3c-60fdbb444b32 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.720306] env[63297]: INFO nova.compute.manager [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] instance snapshotting [ 1233.723537] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637bcfe4-ce88-43ef-9ef0-67c07a9231e7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.745739] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a7feed-e4a7-46c5-b8e5-385a3a8cd416 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.909420] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquiring lock "581f9d48-dcb8-4a34-928b-64087a9f966b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.909621] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lock "581f9d48-dcb8-4a34-928b-64087a9f966b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.075930] env[63297]: DEBUG oslo_concurrency.lockutils [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.119013] env[63297]: INFO nova.compute.manager [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Took 35.54 seconds to build instance. [ 1234.188240] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.583s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.188240] env[63297]: DEBUG nova.compute.manager [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1234.190755] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.587s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.192200] env[63297]: INFO nova.compute.claims [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1234.215431] env[63297]: DEBUG nova.network.neutron [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1234.256763] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1234.257076] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-13f89343-26f4-4926-a7e8-797a39a0fd73 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.264762] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1234.264762] env[63297]: value = "task-1696974" [ 1234.264762] env[63297]: _type = "Task" [ 1234.264762] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.274682] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696974, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.505656] env[63297]: DEBUG nova.network.neutron [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Updating instance_info_cache with network_info: [{"id": "07cc9161-e617-439f-97b5-70331464cd31", "address": "fa:16:3e:2b:c6:74", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07cc9161-e6", "ovs_interfaceid": "07cc9161-e617-439f-97b5-70331464cd31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.622340] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d9dfc07-5300-4851-870b-8f53a6287b20 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "eff06e8a-8341-4d5e-b6dd-a585be4a21ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.862s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.700799] env[63297]: DEBUG nova.compute.utils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1234.708625] env[63297]: DEBUG nova.compute.manager [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1234.708858] env[63297]: DEBUG nova.network.neutron [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1234.721942] env[63297]: DEBUG nova.network.neutron [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Successfully updated port: ea7a3748-62ea-4bc1-b3ba-0da9fab212c3 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1234.770235] env[63297]: DEBUG nova.compute.manager [req-3fdcfcdf-b801-49fa-bcef-4a0b0e38e636 req-ffd7668a-e094-4189-916d-56bf4ed4b16f service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Received event network-vif-plugged-ea7a3748-62ea-4bc1-b3ba-0da9fab212c3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1234.770235] env[63297]: DEBUG oslo_concurrency.lockutils [req-3fdcfcdf-b801-49fa-bcef-4a0b0e38e636 req-ffd7668a-e094-4189-916d-56bf4ed4b16f service nova] Acquiring lock "754e64ec-b6fa-49d8-9de6-ef38918378fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.770235] env[63297]: DEBUG oslo_concurrency.lockutils [req-3fdcfcdf-b801-49fa-bcef-4a0b0e38e636 req-ffd7668a-e094-4189-916d-56bf4ed4b16f service nova] Lock "754e64ec-b6fa-49d8-9de6-ef38918378fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.770235] env[63297]: DEBUG oslo_concurrency.lockutils [req-3fdcfcdf-b801-49fa-bcef-4a0b0e38e636 req-ffd7668a-e094-4189-916d-56bf4ed4b16f service nova] Lock "754e64ec-b6fa-49d8-9de6-ef38918378fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.770235] env[63297]: DEBUG nova.compute.manager [req-3fdcfcdf-b801-49fa-bcef-4a0b0e38e636 req-ffd7668a-e094-4189-916d-56bf4ed4b16f service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] No waiting events found dispatching network-vif-plugged-ea7a3748-62ea-4bc1-b3ba-0da9fab212c3 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1234.770610] env[63297]: WARNING nova.compute.manager 
[req-3fdcfcdf-b801-49fa-bcef-4a0b0e38e636 req-ffd7668a-e094-4189-916d-56bf4ed4b16f service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Received unexpected event network-vif-plugged-ea7a3748-62ea-4bc1-b3ba-0da9fab212c3 for instance with vm_state building and task_state spawning. [ 1234.784921] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696974, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.785505] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquiring lock "13706c85-c23e-47cd-a7d8-2e902c11a7fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.785842] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lock "13706c85-c23e-47cd-a7d8-2e902c11a7fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.786444] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquiring lock "13706c85-c23e-47cd-a7d8-2e902c11a7fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.786444] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lock "13706c85-c23e-47cd-a7d8-2e902c11a7fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.786586] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lock "13706c85-c23e-47cd-a7d8-2e902c11a7fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.788467] env[63297]: INFO nova.compute.manager [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Terminating instance [ 1234.791518] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquiring lock "refresh_cache-13706c85-c23e-47cd-a7d8-2e902c11a7fb" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1234.791794] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquired lock "refresh_cache-13706c85-c23e-47cd-a7d8-2e902c11a7fb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.791861] env[63297]: DEBUG nova.network.neutron [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1234.819859] env[63297]: DEBUG nova.policy [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8767f029ef2847acb8679c8dda841e61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de74a055696b4dd69b88d08b52d327d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1235.012758] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Releasing lock "refresh_cache-459d5a17-182b-4284-b464-57d342981031" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1235.012758] env[63297]: DEBUG nova.compute.manager [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Instance network_info: |[{"id": "07cc9161-e617-439f-97b5-70331464cd31", "address": "fa:16:3e:2b:c6:74", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07cc9161-e6", "ovs_interfaceid": "07cc9161-e617-439f-97b5-70331464cd31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1235.012758] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None 
req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:c6:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07cc9161-e617-439f-97b5-70331464cd31', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1235.022121] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Creating folder: Project (2dd93edf6aee452280b8b7c441b75d38). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1235.023261] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06521a74-5fef-4574-8423-a7e3b334227d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.035820] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Created folder: Project (2dd93edf6aee452280b8b7c441b75d38) in parent group-v353718. [ 1235.038020] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Creating folder: Instances. Parent ref: group-v353777. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1235.038020] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-368b32a4-482a-42f3-87f0-64ce02c87eed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.049126] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Created folder: Instances in parent group-v353777. [ 1235.049126] env[63297]: DEBUG oslo.service.loopingcall [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1235.049126] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 459d5a17-182b-4284-b464-57d342981031] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1235.049126] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-176ccb94-b699-4c0c-8c0f-034eb58e8aa8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.071570] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1235.071570] env[63297]: value = "task-1696977" [ 1235.071570] env[63297]: _type = "Task" [ 1235.071570] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.081602] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696977, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.126325] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1235.209521] env[63297]: DEBUG nova.compute.manager [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1235.228169] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Acquiring lock "refresh_cache-754e64ec-b6fa-49d8-9de6-ef38918378fd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1235.228169] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Acquired lock "refresh_cache-754e64ec-b6fa-49d8-9de6-ef38918378fd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.228169] env[63297]: DEBUG nova.network.neutron [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1235.243367] env[63297]: DEBUG nova.network.neutron [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Successfully created port: 603a207a-5c56-4835-a1be-961da01f6f07 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1235.279379] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696974, 'name': CreateSnapshot_Task, 'duration_secs': 0.52857} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.279648] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1235.281664] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed497c1-7c35-4397-8116-d32264a51add {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.351029] env[63297]: DEBUG nova.network.neutron [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1235.503540] env[63297]: DEBUG nova.network.neutron [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.589098] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696977, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.655154] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1235.760973] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e9ff6e-2513-43ab-9551-9815a62e6047 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.772230] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240164ab-6ad9-4de0-8a92-46b292400e9b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.812548] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1235.813829] env[63297]: DEBUG nova.network.neutron [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1235.816178] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1ffb529e-06d9-4a5c-920c-e639579c0697 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.821047] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1b3a07-8d31-495a-8019-93012785fc17 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.837314] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ff043e-703e-4642-be42-505358b64407 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.841324] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1235.841324] env[63297]: value = "task-1696978" [ 1235.841324] env[63297]: _type = "Task" [ 1235.841324] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.853715] env[63297]: DEBUG nova.compute.provider_tree [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1235.863198] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696978, 'name': CloneVM_Task} progress is 11%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.980448] env[63297]: DEBUG nova.compute.manager [req-550a0827-d910-4a88-bbde-42bd0e67d248 req-c539dc02-f5a9-450a-a49b-00289b540550 service nova] [instance: 459d5a17-182b-4284-b464-57d342981031] Received event network-changed-07cc9161-e617-439f-97b5-70331464cd31 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1235.980656] env[63297]: DEBUG nova.compute.manager [req-550a0827-d910-4a88-bbde-42bd0e67d248 req-c539dc02-f5a9-450a-a49b-00289b540550 service nova] [instance: 459d5a17-182b-4284-b464-57d342981031] Refreshing instance network info cache due to event network-changed-07cc9161-e617-439f-97b5-70331464cd31. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1235.981342] env[63297]: DEBUG oslo_concurrency.lockutils [req-550a0827-d910-4a88-bbde-42bd0e67d248 req-c539dc02-f5a9-450a-a49b-00289b540550 service nova] Acquiring lock "refresh_cache-459d5a17-182b-4284-b464-57d342981031" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1235.981508] env[63297]: DEBUG oslo_concurrency.lockutils [req-550a0827-d910-4a88-bbde-42bd0e67d248 req-c539dc02-f5a9-450a-a49b-00289b540550 service nova] Acquired lock "refresh_cache-459d5a17-182b-4284-b464-57d342981031" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.981667] env[63297]: DEBUG nova.network.neutron [req-550a0827-d910-4a88-bbde-42bd0e67d248 req-c539dc02-f5a9-450a-a49b-00289b540550 service nova] [instance: 459d5a17-182b-4284-b464-57d342981031] Refreshing network info cache for port 07cc9161-e617-439f-97b5-70331464cd31 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1236.006717] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Releasing lock "refresh_cache-13706c85-c23e-47cd-a7d8-2e902c11a7fb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1236.009138] env[63297]: DEBUG nova.compute.manager [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1236.009138] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1236.009138] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1399c5-4cb0-41b9-bdd6-155c084018e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.018179] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1236.018554] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-276b737c-55ec-4bdf-bc20-1d861cb4be0c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.027731] env[63297]: DEBUG oslo_vmware.api [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1236.027731] env[63297]: value = "task-1696979" [ 1236.027731] env[63297]: _type = "Task" [ 1236.027731] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.038892] env[63297]: DEBUG oslo_vmware.api [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696979, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.083590] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696977, 'name': CreateVM_Task, 'duration_secs': 0.584127} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.085642] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 459d5a17-182b-4284-b464-57d342981031] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1236.085642] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1236.085642] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.085642] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1236.086344] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0603634a-2760-46fc-9d2f-624b6da95c67 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.092620] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Waiting for the task: (returnval){ [ 1236.092620] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]523efb1d-f127-0d40-db2a-650c7955a130" [ 1236.092620] env[63297]: _type = "Task" [ 1236.092620] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.100015] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523efb1d-f127-0d40-db2a-650c7955a130, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.221837] env[63297]: DEBUG nova.compute.manager [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1236.247509] env[63297]: DEBUG nova.virt.hardware [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1236.247737] env[63297]: DEBUG nova.virt.hardware [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1236.247894] env[63297]: DEBUG nova.virt.hardware [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1236.248091] env[63297]: DEBUG nova.virt.hardware [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1236.248239] env[63297]: DEBUG nova.virt.hardware [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1236.248383] env[63297]: DEBUG nova.virt.hardware [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1236.248588] env[63297]: DEBUG nova.virt.hardware [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1236.248769] env[63297]: DEBUG nova.virt.hardware [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1236.248950] env[63297]: DEBUG nova.virt.hardware [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1236.249126] env[63297]: DEBUG nova.virt.hardware [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1236.249304] env[63297]: DEBUG nova.virt.hardware [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1236.250277] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211196b7-98c9-4818-94fb-ff2fa367f6e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.258769] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a75f539-78d5-465e-af23-7b8b8b7c5cb3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.329144] env[63297]: DEBUG nova.network.neutron [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Updating instance_info_cache with network_info: [{"id": "ea7a3748-62ea-4bc1-b3ba-0da9fab212c3", "address": "fa:16:3e:53:81:f5", "network": {"id": "9301f145-9740-4669-9a28-38fb263d89d2", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1155086179-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e7e471b2638428080e76e0d2b740da4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea7a3748-62", "ovs_interfaceid": "ea7a3748-62ea-4bc1-b3ba-0da9fab212c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.355432] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 
tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696978, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.359639] env[63297]: DEBUG nova.scheduler.client.report [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1236.543928] env[63297]: DEBUG oslo_vmware.api [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696979, 'name': PowerOffVM_Task, 'duration_secs': 0.176446} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.544671] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1236.545242] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1236.545589] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ba67132-0238-42f1-abe3-ed9ec7e74cb6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.575891] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1236.575891] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1236.575891] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Deleting the datastore file [datastore1] 13706c85-c23e-47cd-a7d8-2e902c11a7fb {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1236.576542] env[63297]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5481742d-dc37-4164-8436-05198fb07962 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.584273] env[63297]: DEBUG oslo_vmware.api [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for the task: (returnval){ [ 1236.584273] env[63297]: value = "task-1696981" [ 1236.584273] env[63297]: _type = "Task" [ 1236.584273] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.596013] env[63297]: DEBUG oslo_vmware.api [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696981, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.610711] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523efb1d-f127-0d40-db2a-650c7955a130, 'name': SearchDatastore_Task, 'duration_secs': 0.012438} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.610711] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1236.610711] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1236.610711] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1236.610711] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.610711] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1236.610711] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b91d0aec-610f-4067-8d37-720908b650bb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.622744] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1236.623081] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1236.624873] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a3d4ec5-e80b-4ffe-a5ba-375a6134e3f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.633318] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Waiting for the task: (returnval){ [ 1236.633318] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bdadfe-9bb8-41f4-428e-612a9b71c647" [ 1236.633318] env[63297]: _type = "Task" [ 1236.633318] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.643822] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bdadfe-9bb8-41f4-428e-612a9b71c647, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.832704] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Releasing lock "refresh_cache-754e64ec-b6fa-49d8-9de6-ef38918378fd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1236.833043] env[63297]: DEBUG nova.compute.manager [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Instance network_info: |[{"id": "ea7a3748-62ea-4bc1-b3ba-0da9fab212c3", "address": "fa:16:3e:53:81:f5", "network": {"id": "9301f145-9740-4669-9a28-38fb263d89d2", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1155086179-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e7e471b2638428080e76e0d2b740da4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea7a3748-62", "ovs_interfaceid": "ea7a3748-62ea-4bc1-b3ba-0da9fab212c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1236.833520] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:81:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea7a3748-62ea-4bc1-b3ba-0da9fab212c3', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1236.841392] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Creating folder: Project (5e7e471b2638428080e76e0d2b740da4). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1236.841645] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f9ec60c-bde9-4080-ad87-e2b8610a2062 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.852872] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696978, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.854648] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Created folder: Project (5e7e471b2638428080e76e0d2b740da4) in parent group-v353718. [ 1236.854828] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Creating folder: Instances. Parent ref: group-v353781. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1236.855069] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d98d597a-5685-4824-865d-5f60e3d05feb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.863680] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Created folder: Instances in parent group-v353781. [ 1236.863995] env[63297]: DEBUG oslo.service.loopingcall [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1236.864250] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1236.864498] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32204449-9d1d-493c-b672-5fb1b1cea2d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.880696] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1236.880999] env[63297]: DEBUG nova.compute.manager [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1236.886734] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.196s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1236.888320] env[63297]: INFO nova.compute.claims [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1236.896573] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1236.896573] env[63297]: value = "task-1696984" [ 1236.896573] env[63297]: _type = "Task" [ 1236.896573] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.906250] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696984, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.080955] env[63297]: DEBUG nova.network.neutron [req-550a0827-d910-4a88-bbde-42bd0e67d248 req-c539dc02-f5a9-450a-a49b-00289b540550 service nova] [instance: 459d5a17-182b-4284-b464-57d342981031] Updated VIF entry in instance network info cache for port 07cc9161-e617-439f-97b5-70331464cd31. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1237.080955] env[63297]: DEBUG nova.network.neutron [req-550a0827-d910-4a88-bbde-42bd0e67d248 req-c539dc02-f5a9-450a-a49b-00289b540550 service nova] [instance: 459d5a17-182b-4284-b464-57d342981031] Updating instance_info_cache with network_info: [{"id": "07cc9161-e617-439f-97b5-70331464cd31", "address": "fa:16:3e:2b:c6:74", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07cc9161-e6", "ovs_interfaceid": "07cc9161-e617-439f-97b5-70331464cd31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.094272] env[63297]: DEBUG oslo_vmware.api [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Task: {'id': task-1696981, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.113139} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.095619] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1237.095958] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1237.096318] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1237.096610] env[63297]: INFO nova.compute.manager [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1237.099022] env[63297]: DEBUG oslo.service.loopingcall [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1237.099022] env[63297]: DEBUG nova.compute.manager [-] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1237.099022] env[63297]: DEBUG nova.network.neutron [-] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1237.129293] env[63297]: DEBUG nova.network.neutron [-] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1237.151160] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bdadfe-9bb8-41f4-428e-612a9b71c647, 'name': SearchDatastore_Task, 'duration_secs': 0.012195} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.152113] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0454333-ce50-46f8-bbbe-74b528f77365 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.158273] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Waiting for the task: (returnval){ [ 1237.158273] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526953c2-1779-7527-1bb9-2bdd115de7f8" [ 1237.158273] env[63297]: _type = "Task" [ 1237.158273] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.173403] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526953c2-1779-7527-1bb9-2bdd115de7f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.361328] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696978, 'name': CloneVM_Task} progress is 95%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.411819] env[63297]: DEBUG nova.compute.utils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1237.411819] env[63297]: DEBUG nova.compute.manager [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1237.411819] env[63297]: DEBUG nova.network.neutron [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1237.414666] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696984, 'name': CreateVM_Task, 'duration_secs': 0.493727} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.414666] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1237.415605] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.415605] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.415605] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1237.415605] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70082892-baba-4260-af26-849ae7ade380 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.427944] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Waiting for the task: (returnval){ [ 1237.427944] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5215b0a3-363d-aa1f-f12b-3526d5e75dff" [ 1237.427944] env[63297]: _type = "Task" [ 1237.427944] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.439566] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5215b0a3-363d-aa1f-f12b-3526d5e75dff, 'name': SearchDatastore_Task, 'duration_secs': 0.009754} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.439566] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.439566] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1237.439566] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.480015] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "fba9040d-f904-44a1-8785-14d4696ea939" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.480353] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "fba9040d-f904-44a1-8785-14d4696ea939" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.529567] env[63297]: DEBUG nova.compute.manager [req-84cc24c0-6387-497b-ae52-b642589b1c78 req-e4efe12f-7b8e-478a-a283-6b0651c3f94d service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Received event network-changed-ea7a3748-62ea-4bc1-b3ba-0da9fab212c3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1237.529567] env[63297]: DEBUG nova.compute.manager [req-84cc24c0-6387-497b-ae52-b642589b1c78 req-e4efe12f-7b8e-478a-a283-6b0651c3f94d service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Refreshing instance network info cache due to event network-changed-ea7a3748-62ea-4bc1-b3ba-0da9fab212c3. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1237.529567] env[63297]: DEBUG oslo_concurrency.lockutils [req-84cc24c0-6387-497b-ae52-b642589b1c78 req-e4efe12f-7b8e-478a-a283-6b0651c3f94d service nova] Acquiring lock "refresh_cache-754e64ec-b6fa-49d8-9de6-ef38918378fd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.529567] env[63297]: DEBUG oslo_concurrency.lockutils [req-84cc24c0-6387-497b-ae52-b642589b1c78 req-e4efe12f-7b8e-478a-a283-6b0651c3f94d service nova] Acquired lock "refresh_cache-754e64ec-b6fa-49d8-9de6-ef38918378fd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.530967] env[63297]: DEBUG nova.network.neutron [req-84cc24c0-6387-497b-ae52-b642589b1c78 req-e4efe12f-7b8e-478a-a283-6b0651c3f94d service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Refreshing network info cache for port ea7a3748-62ea-4bc1-b3ba-0da9fab212c3 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1237.583617] env[63297]: DEBUG oslo_concurrency.lockutils [req-550a0827-d910-4a88-bbde-42bd0e67d248 req-c539dc02-f5a9-450a-a49b-00289b540550 service nova] Releasing lock "refresh_cache-459d5a17-182b-4284-b464-57d342981031" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.603219] env[63297]: DEBUG nova.policy [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '648e6fdbc1d5460883e4c876a3273d41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39986503166b4d44a424102c6c528225', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1237.621811] env[63297]: DEBUG nova.network.neutron [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Successfully updated port: 603a207a-5c56-4835-a1be-961da01f6f07 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1237.633832] env[63297]: DEBUG nova.network.neutron [-] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.670776] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526953c2-1779-7527-1bb9-2bdd115de7f8, 'name': SearchDatastore_Task, 'duration_secs': 0.012502} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.670776] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.670776] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 459d5a17-182b-4284-b464-57d342981031/459d5a17-182b-4284-b464-57d342981031.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1237.670776] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.671611] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1237.671611] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77834007-ae97-4d6d-bbac-f62efdefd826 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.673307] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68c1903a-6da5-423d-8ec9-08e0f4445bc9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.676607] env[63297]: DEBUG nova.compute.manager [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1237.678934] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e460440d-7996-4e30-a6ac-887e348e3e8d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.692878] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Waiting for the task: (returnval){ [ 1237.692878] env[63297]: value = "task-1696985" [ 1237.692878] env[63297]: _type = "Task" [ 1237.692878] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.694324] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1237.694505] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1237.698511] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b964149a-192d-4a4c-9342-b30ca7d4870b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.708385] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696985, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.708498] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Waiting for the task: (returnval){ [ 1237.708498] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5232568d-822e-42b3-1317-67bd6baccc07" [ 1237.708498] env[63297]: _type = "Task" [ 1237.708498] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.718747] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5232568d-822e-42b3-1317-67bd6baccc07, 'name': SearchDatastore_Task, 'duration_secs': 0.010003} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.720118] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ecc3774-e329-4ca9-996f-6f07679e9598 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.726049] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Waiting for the task: (returnval){ [ 1237.726049] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d30fc-da5b-f77f-fbbb-00801680148c" [ 1237.726049] env[63297]: _type = "Task" [ 1237.726049] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.735430] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d30fc-da5b-f77f-fbbb-00801680148c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.855755] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1696978, 'name': CloneVM_Task, 'duration_secs': 1.82655} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.856226] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Created linked-clone VM from snapshot [ 1237.857385] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8361a12a-be84-427d-a9cd-e3e65bcd91b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.866483] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Uploading image 136bd841-b3df-4cec-b1d6-05f6102c5ab8 {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1237.895230] env[63297]: DEBUG oslo_vmware.rw_handles [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1237.895230] env[63297]: value = "vm-353780" [ 1237.895230] env[63297]: _type = "VirtualMachine" [ 1237.895230] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1237.899690] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d5442945-1468-41f8-b5f4-9a40e3f849ca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.900648] env[63297]: DEBUG nova.compute.manager [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1237.913036] env[63297]: DEBUG oslo_vmware.rw_handles [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lease: (returnval){ [ 1237.913036] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e1079f-3931-33a9-b3f2-ddaee0caada0" [ 1237.913036] env[63297]: _type = "HttpNfcLease" [ 1237.913036] env[63297]: } obtained for exporting VM: (result){ [ 1237.913036] env[63297]: value = "vm-353780" [ 1237.913036] env[63297]: _type = "VirtualMachine" [ 1237.913036] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1237.913425] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the lease: (returnval){ [ 1237.913425] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e1079f-3931-33a9-b3f2-ddaee0caada0" [ 1237.913425] env[63297]: _type = "HttpNfcLease" [ 1237.913425] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1237.923768] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1237.923768] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e1079f-3931-33a9-b3f2-ddaee0caada0" [ 1237.923768] env[63297]: _type = "HttpNfcLease" [ 1237.923768] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1238.124946] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "refresh_cache-b14e8466-68ab-4705-a439-6db961a149b0" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1238.125212] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "refresh_cache-b14e8466-68ab-4705-a439-6db961a149b0" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.126809] env[63297]: DEBUG nova.network.neutron [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1238.137129] env[63297]: INFO nova.compute.manager [-] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Took 1.04 seconds to deallocate network for instance. 
[ 1238.167744] env[63297]: DEBUG nova.network.neutron [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Successfully created port: 15254c40-f75e-453c-858a-af927dddc6c7 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1238.194603] env[63297]: INFO nova.compute.manager [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] instance snapshotting [ 1238.204389] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8051e5e5-5b3a-45e8-9762-bae312649f6b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.228538] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696985, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.230569] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd66e7f4-4bec-4315-af71-c8b3bcd188b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.242839] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d30fc-da5b-f77f-fbbb-00801680148c, 'name': SearchDatastore_Task, 'duration_secs': 0.009311} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.246866] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1238.247166] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 754e64ec-b6fa-49d8-9de6-ef38918378fd/754e64ec-b6fa-49d8-9de6-ef38918378fd.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1238.252396] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f774f8e-84fc-4e24-a7a5-26a837e11c54 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.259669] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Waiting for the task: (returnval){ [ 1238.259669] env[63297]: value = "task-1696987" [ 1238.259669] env[63297]: _type = "Task" [ 1238.259669] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.275684] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1696987, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.306517] env[63297]: DEBUG nova.compute.manager [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Received event network-vif-plugged-603a207a-5c56-4835-a1be-961da01f6f07 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1238.307025] env[63297]: DEBUG oslo_concurrency.lockutils [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] Acquiring lock "b14e8466-68ab-4705-a439-6db961a149b0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.307025] env[63297]: DEBUG oslo_concurrency.lockutils [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] Lock "b14e8466-68ab-4705-a439-6db961a149b0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1238.307215] env[63297]: DEBUG oslo_concurrency.lockutils [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] Lock "b14e8466-68ab-4705-a439-6db961a149b0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.307261] env[63297]: DEBUG nova.compute.manager [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] [instance: b14e8466-68ab-4705-a439-6db961a149b0] No waiting events found dispatching network-vif-plugged-603a207a-5c56-4835-a1be-961da01f6f07 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1238.307420] env[63297]: WARNING nova.compute.manager [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Received unexpected event network-vif-plugged-603a207a-5c56-4835-a1be-961da01f6f07 for instance with vm_state building and task_state spawning. [ 1238.307572] env[63297]: DEBUG nova.compute.manager [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Received event network-changed-603a207a-5c56-4835-a1be-961da01f6f07 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1238.307750] env[63297]: DEBUG nova.compute.manager [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Refreshing instance network info cache due to event network-changed-603a207a-5c56-4835-a1be-961da01f6f07. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1238.308172] env[63297]: DEBUG oslo_concurrency.lockutils [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] Acquiring lock "refresh_cache-b14e8466-68ab-4705-a439-6db961a149b0" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1238.422738] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1238.422738] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e1079f-3931-33a9-b3f2-ddaee0caada0" [ 1238.422738] env[63297]: _type = "HttpNfcLease" [ 1238.422738] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1238.423045] env[63297]: DEBUG oslo_vmware.rw_handles [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1238.423045] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e1079f-3931-33a9-b3f2-ddaee0caada0" [ 1238.423045] env[63297]: _type = "HttpNfcLease" [ 1238.423045] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1238.423889] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e50d1a-56c3-4fb3-abbd-312aaecae477 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.432648] env[63297]: DEBUG oslo_vmware.rw_handles [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255dec0-3fa2-9762-c115-33a888528364/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1238.433092] env[63297]: DEBUG oslo_vmware.rw_handles [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255dec0-3fa2-9762-c115-33a888528364/disk-0.vmdk for reading. 
{{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1238.536858] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-38cda89c-1364-4121-a9a6-85c6f6a41e33 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.550198] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e845c2-7959-45a7-8ee6-f6f19f0b5c3f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.562225] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611e7d18-dce4-4f62-95f3-ac6285ed1df3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.601275] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d9dc91-4290-4195-b88c-eee79af4a5bc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.611087] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2844e7bd-4342-4b7b-a412-009be7afebcc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.641896] env[63297]: DEBUG nova.compute.provider_tree [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1238.644847] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.719316] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696985, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.743876] env[63297]: DEBUG nova.network.neutron [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1238.758764] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1238.759343] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3986d97b-7221-463d-9a25-6a5b347f537d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.774836] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1696987, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.776910] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1238.776910] env[63297]: value = "task-1696988" [ 1238.776910] env[63297]: _type = "Task" [ 1238.776910] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.793034] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696988, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.912616] env[63297]: DEBUG nova.compute.manager [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1238.950487] env[63297]: DEBUG nova.virt.hardware [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1238.950809] env[63297]: DEBUG nova.virt.hardware [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1238.951040] env[63297]: DEBUG nova.virt.hardware [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1238.956021] env[63297]: DEBUG nova.virt.hardware [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1238.956547] env[63297]: DEBUG nova.virt.hardware [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1238.956547] env[63297]: DEBUG nova.virt.hardware [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1238.956773] env[63297]: DEBUG nova.virt.hardware [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1238.956972] env[63297]: DEBUG nova.virt.hardware [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1238.957295] env[63297]: DEBUG nova.virt.hardware [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1238.957363] env[63297]: DEBUG nova.virt.hardware [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1238.957539] env[63297]: DEBUG nova.virt.hardware [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1238.958605] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac648802-45be-4823-944f-7ddb908bb79e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.968165] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4d6065-7144-4b95-8a2e-3fa749b9cf52 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.013095] env[63297]: DEBUG nova.network.neutron [req-84cc24c0-6387-497b-ae52-b642589b1c78 req-e4efe12f-7b8e-478a-a283-6b0651c3f94d service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Updated VIF entry in instance network info cache for port ea7a3748-62ea-4bc1-b3ba-0da9fab212c3. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1239.013095] env[63297]: DEBUG nova.network.neutron [req-84cc24c0-6387-497b-ae52-b642589b1c78 req-e4efe12f-7b8e-478a-a283-6b0651c3f94d service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Updating instance_info_cache with network_info: [{"id": "ea7a3748-62ea-4bc1-b3ba-0da9fab212c3", "address": "fa:16:3e:53:81:f5", "network": {"id": "9301f145-9740-4669-9a28-38fb263d89d2", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1155086179-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e7e471b2638428080e76e0d2b740da4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea7a3748-62", "ovs_interfaceid": "ea7a3748-62ea-4bc1-b3ba-0da9fab212c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.130055] env[63297]: DEBUG nova.network.neutron [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Updating instance_info_cache with network_info: [{"id": "603a207a-5c56-4835-a1be-961da01f6f07", "address": "fa:16:3e:cd:73:33", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap603a207a-5c", "ovs_interfaceid": "603a207a-5c56-4835-a1be-961da01f6f07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.147688] env[63297]: DEBUG nova.scheduler.client.report [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1239.213679] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696985, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.339917} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.213956] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 459d5a17-182b-4284-b464-57d342981031/459d5a17-182b-4284-b464-57d342981031.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1239.215198] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1239.215518] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b23a065-cafa-498f-a2b1-44dfd31d0de6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.226433] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Waiting for the task: (returnval){ [ 1239.226433] env[63297]: value = "task-1696989" [ 1239.226433] env[63297]: _type = "Task" [ 1239.226433] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.237723] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696989, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.275343] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1696987, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766306} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.275475] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 754e64ec-b6fa-49d8-9de6-ef38918378fd/754e64ec-b6fa-49d8-9de6-ef38918378fd.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1239.275714] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1239.276085] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bd33cc4-6831-4e37-881c-39e6a6f60e75 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.290465] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696988, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.292303] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Waiting for the task: (returnval){ [ 1239.292303] env[63297]: value = "task-1696990" [ 1239.292303] env[63297]: _type = "Task" [ 1239.292303] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.302979] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1696990, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.519264] env[63297]: DEBUG oslo_concurrency.lockutils [req-84cc24c0-6387-497b-ae52-b642589b1c78 req-e4efe12f-7b8e-478a-a283-6b0651c3f94d service nova] Releasing lock "refresh_cache-754e64ec-b6fa-49d8-9de6-ef38918378fd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.636121] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "refresh_cache-b14e8466-68ab-4705-a439-6db961a149b0" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.636599] env[63297]: DEBUG nova.compute.manager [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Instance network_info: |[{"id": "603a207a-5c56-4835-a1be-961da01f6f07", "address": "fa:16:3e:cd:73:33", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap603a207a-5c", "ovs_interfaceid": "603a207a-5c56-4835-a1be-961da01f6f07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1239.636927] env[63297]: DEBUG oslo_concurrency.lockutils [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] Acquired lock "refresh_cache-b14e8466-68ab-4705-a439-6db961a149b0" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.639021] env[63297]: DEBUG nova.network.neutron [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Refreshing network info cache for port 603a207a-5c56-4835-a1be-961da01f6f07 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1239.639021] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:73:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee4b2432-c393-4e50-ae0e-b5e12bad37db', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'603a207a-5c56-4835-a1be-961da01f6f07', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1239.656376] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Creating folder: Project (de74a055696b4dd69b88d08b52d327d1). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1239.663287] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.777s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.665056] env[63297]: DEBUG nova.compute.manager [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1239.668199] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5522c859-715e-471d-a8d7-947ef7eff42e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.673547] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 25.354s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.690018] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Created folder: Project (de74a055696b4dd69b88d08b52d327d1) in parent group-v353718. [ 1239.690975] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Creating folder: Instances. Parent ref: group-v353784. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1239.690975] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b7cc841-7089-4334-ba51-f045b93f1b41 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.706217] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Created folder: Instances in parent group-v353784. [ 1239.706217] env[63297]: DEBUG oslo.service.loopingcall [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1239.706217] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1239.706217] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d5ed6ba-e8b0-4f71-922c-ae65c92ef15f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.732409] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1239.732409] env[63297]: value = "task-1696993" [ 1239.732409] env[63297]: _type = "Task" [ 1239.732409] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.735434] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696989, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075252} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.738780] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1239.739625] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cd7a2a-9c73-465e-b3af-1e99fcd2de58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.747816] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696993, 'name': CreateVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.767021] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 459d5a17-182b-4284-b464-57d342981031/459d5a17-182b-4284-b464-57d342981031.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1239.768033] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed254c24-7eff-436a-9eec-ef3f738d4a91 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.793079] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696988, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.794769] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Waiting for the task: (returnval){ [ 1239.794769] env[63297]: value = "task-1696994" [ 1239.794769] env[63297]: _type = "Task" [ 1239.794769] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.810035] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696994, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.810406] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1696990, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.162406} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.813119] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1239.814046] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3707510e-ad5a-40c5-be87-e25a468c6f70 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.838812] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 754e64ec-b6fa-49d8-9de6-ef38918378fd/754e64ec-b6fa-49d8-9de6-ef38918378fd.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1239.841599] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2a7f783-4a5b-495b-9bbb-394e54ad3aa9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.864466] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Waiting for the task: (returnval){ [ 1239.864466] env[63297]: value = "task-1696995" [ 1239.864466] env[63297]: _type = "Task" [ 1239.864466] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.874758] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1696995, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.170327] env[63297]: DEBUG nova.network.neutron [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Updated VIF entry in instance network info cache for port 603a207a-5c56-4835-a1be-961da01f6f07. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1240.170843] env[63297]: DEBUG nova.network.neutron [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Updating instance_info_cache with network_info: [{"id": "603a207a-5c56-4835-a1be-961da01f6f07", "address": "fa:16:3e:cd:73:33", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap603a207a-5c", "ovs_interfaceid": "603a207a-5c56-4835-a1be-961da01f6f07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.175719] env[63297]: DEBUG nova.compute.utils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1240.178319] env[63297]: DEBUG nova.compute.manager [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1240.178698] env[63297]: DEBUG nova.network.neutron [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1240.185684] env[63297]: INFO nova.compute.claims [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1240.196562] env[63297]: DEBUG nova.compute.manager [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1240.247021] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1696993, 'name': CreateVM_Task, 'duration_secs': 0.45813} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.247210] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1240.247998] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.248217] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.248826] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1240.249113] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa25c89f-cb1c-47b8-aed4-b1430503cb12 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.254468] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1240.254468] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5298cdd7-62cb-90c1-0d27-c3c4549851d5" [ 1240.254468] env[63297]: _type = "Task" [ 1240.254468] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.264500] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5298cdd7-62cb-90c1-0d27-c3c4549851d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.293188] env[63297]: DEBUG nova.policy [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8767f029ef2847acb8679c8dda841e61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de74a055696b4dd69b88d08b52d327d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1240.298161] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696988, 'name': CreateSnapshot_Task, 'duration_secs': 1.28977} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.301850] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1240.302736] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde99cfa-f9d2-48c0-bf65-b230bfa5d7c7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.310850] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696994, 'name': ReconfigVM_Task, 'duration_secs': 0.443264} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.316145] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 459d5a17-182b-4284-b464-57d342981031/459d5a17-182b-4284-b464-57d342981031.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1240.316820] env[63297]: DEBUG nova.compute.manager [req-65986181-bb95-401f-952e-eb262110b433 req-0f46381e-9c47-4227-b6f7-8c425b5ad292 service nova] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Received event network-vif-plugged-15254c40-f75e-453c-858a-af927dddc6c7 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1240.317024] env[63297]: DEBUG oslo_concurrency.lockutils [req-65986181-bb95-401f-952e-eb262110b433 req-0f46381e-9c47-4227-b6f7-8c425b5ad292 service nova] Acquiring lock "f3a579de-1f29-4b67-8dc8-07ea37267001-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.317240] env[63297]: DEBUG oslo_concurrency.lockutils [req-65986181-bb95-401f-952e-eb262110b433 req-0f46381e-9c47-4227-b6f7-8c425b5ad292 service nova] Lock "f3a579de-1f29-4b67-8dc8-07ea37267001-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.317402] env[63297]: DEBUG oslo_concurrency.lockutils [req-65986181-bb95-401f-952e-eb262110b433 req-0f46381e-9c47-4227-b6f7-8c425b5ad292 service nova] Lock "f3a579de-1f29-4b67-8dc8-07ea37267001-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.317809] env[63297]: DEBUG nova.compute.manager [req-65986181-bb95-401f-952e-eb262110b433 req-0f46381e-9c47-4227-b6f7-8c425b5ad292 service nova] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] No waiting events found dispatching network-vif-plugged-15254c40-f75e-453c-858a-af927dddc6c7 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1240.317809] env[63297]: WARNING nova.compute.manager [req-65986181-bb95-401f-952e-eb262110b433 req-0f46381e-9c47-4227-b6f7-8c425b5ad292 service nova] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Received unexpected event network-vif-plugged-15254c40-f75e-453c-858a-af927dddc6c7 for instance with vm_state building and task_state spawning. 
[ 1240.322294] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-754f6c3e-04bb-4270-a8f2-64d8fb88a322 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.341026] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Waiting for the task: (returnval){ [ 1240.341026] env[63297]: value = "task-1696996" [ 1240.341026] env[63297]: _type = "Task" [ 1240.341026] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.349395] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696996, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.375942] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1696995, 'name': ReconfigVM_Task, 'duration_secs': 0.313085} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.376397] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 754e64ec-b6fa-49d8-9de6-ef38918378fd/754e64ec-b6fa-49d8-9de6-ef38918378fd.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1240.376960] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63c09f16-ff72-431b-a4ca-f379af5b9eee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.383512] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Waiting for the task: (returnval){ [ 1240.383512] env[63297]: value = "task-1696997" [ 1240.383512] env[63297]: _type = "Task" [ 1240.383512] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.394140] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1696997, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.482112] env[63297]: DEBUG nova.network.neutron [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Successfully updated port: 15254c40-f75e-453c-858a-af927dddc6c7 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1240.675282] env[63297]: DEBUG oslo_concurrency.lockutils [req-a063be40-822b-4280-ada1-a11c50e48aa6 req-29a66fe4-ac31-43f1-adfc-644f65d583d1 service nova] Releasing lock "refresh_cache-b14e8466-68ab-4705-a439-6db961a149b0" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.695555] env[63297]: INFO nova.compute.resource_tracker [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating resource usage from migration 4720dc5e-30b9-4f95-8509-ba35c8eb57ef [ 1240.770517] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5298cdd7-62cb-90c1-0d27-c3c4549851d5, 'name': SearchDatastore_Task, 'duration_secs': 0.00961} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.770517] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.770517] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1240.770693] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.770745] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.770894] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1240.771712] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f37dd818-df60-4a85-9439-9d1535c3514e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.781776] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1240.781776] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1240.781986] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09a3adc8-a48e-43b9-862e-a365c5871b86 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.794669] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1240.794669] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5235780a-cac8-4b9a-1ed2-c517f7d76a62" [ 1240.794669] env[63297]: _type = "Task" [ 1240.794669] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.806737] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5235780a-cac8-4b9a-1ed2-c517f7d76a62, 'name': SearchDatastore_Task, 'duration_secs': 0.009727} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.807918] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d93fc36a-762b-4774-875c-49b878818218 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.817251] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1240.817251] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524513aa-a3df-0341-5895-59a15248fc24" [ 1240.817251] env[63297]: _type = "Task" [ 1240.817251] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.826920] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524513aa-a3df-0341-5895-59a15248fc24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.841291] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1240.844437] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fe065d6e-234d-4714-8a11-472335e41984 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.859653] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696996, 'name': Rename_Task, 'duration_secs': 0.198332} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.860388] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1240.860723] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1240.860723] env[63297]: value = "task-1696998" [ 1240.860723] env[63297]: _type = "Task" [ 1240.860723] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.860923] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a7a9d55-e34a-4845-8feb-d9f9e18f9d9c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.874187] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696998, 'name': CloneVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.878794] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Waiting for the task: (returnval){ [ 1240.878794] env[63297]: value = "task-1696999" [ 1240.878794] env[63297]: _type = "Task" [ 1240.878794] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.891577] env[63297]: DEBUG nova.network.neutron [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Successfully created port: 1c87cf43-e466-4636-a53a-8c75e95f185d {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1240.893966] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696999, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.905245] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1696997, 'name': Rename_Task, 'duration_secs': 0.139758} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.905245] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1240.905245] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b5d82cf-81eb-4480-93f4-5c8502ced245 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.910724] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Waiting for the task: (returnval){ [ 1240.910724] env[63297]: value = "task-1697000" [ 1240.910724] env[63297]: _type = "Task" [ 1240.910724] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.920527] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1697000, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.984559] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "refresh_cache-f3a579de-1f29-4b67-8dc8-07ea37267001" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.984739] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquired lock "refresh_cache-f3a579de-1f29-4b67-8dc8-07ea37267001" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.984899] env[63297]: DEBUG nova.network.neutron [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1241.210771] env[63297]: DEBUG nova.compute.manager [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1241.244672] env[63297]: DEBUG nova.virt.hardware [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1241.245446] env[63297]: DEBUG nova.virt.hardware [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1241.248923] env[63297]: DEBUG nova.virt.hardware [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1241.248923] env[63297]: DEBUG nova.virt.hardware [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Flavor pref 0:0:0 
{{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1241.248923] env[63297]: DEBUG nova.virt.hardware [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1241.248923] env[63297]: DEBUG nova.virt.hardware [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1241.248923] env[63297]: DEBUG nova.virt.hardware [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1241.248923] env[63297]: DEBUG nova.virt.hardware [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1241.248923] env[63297]: DEBUG nova.virt.hardware [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1241.248923] env[63297]: DEBUG nova.virt.hardware [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1241.248923] env[63297]: DEBUG nova.virt.hardware [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1241.248923] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d5091e-47e8-4ffa-ab3b-d397522f5b77 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.272989] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d45b90-b2aa-45d7-93dd-804cf4ef8d6b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.333404] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524513aa-a3df-0341-5895-59a15248fc24, 'name': SearchDatastore_Task, 'duration_secs': 0.009355} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.333510] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.333769] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b14e8466-68ab-4705-a439-6db961a149b0/b14e8466-68ab-4705-a439-6db961a149b0.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1241.334042] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b249f65-2dc9-407f-b6f4-80a1ad74b537 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.343905] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1241.343905] env[63297]: value = "task-1697001" [ 1241.343905] env[63297]: _type = "Task" [ 1241.343905] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.360052] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697001, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.376373] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696998, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.378451] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bd5251-2707-4448-ade0-5e27dfe9a816 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.395877] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133c1e2f-6dc6-4103-88f3-f4c87e0c918f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.397794] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696999, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.440912] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cb0a22-7896-4aab-9643-7f96a7fae3ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.454304] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1697000, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.459076] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e87007-9226-41b7-acaa-ebf67d7931d9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.475322] env[63297]: DEBUG nova.compute.provider_tree [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1241.537844] env[63297]: DEBUG nova.network.neutron [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1241.859062] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697001, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.879900] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696998, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.895711] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696999, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.897119] env[63297]: DEBUG nova.network.neutron [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Updating instance_info_cache with network_info: [{"id": "15254c40-f75e-453c-858a-af927dddc6c7", "address": "fa:16:3e:76:0b:e7", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15254c40-f7", "ovs_interfaceid": "15254c40-f75e-453c-858a-af927dddc6c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.949030] env[63297]: DEBUG oslo_vmware.api [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1697000, 'name': PowerOnVM_Task, 'duration_secs': 0.561031} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.949162] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1241.949356] env[63297]: INFO nova.compute.manager [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Took 8.29 seconds to spawn the instance on the hypervisor. 
[ 1241.949568] env[63297]: DEBUG nova.compute.manager [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1241.950353] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7a1ae1-62c0-4699-aa85-0724bf881c66 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.981818] env[63297]: DEBUG nova.scheduler.client.report [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1242.355863] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697001, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626121} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.356170] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b14e8466-68ab-4705-a439-6db961a149b0/b14e8466-68ab-4705-a439-6db961a149b0.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1242.356354] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1242.356611] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e779b63-170f-4df0-98f1-1e3b2d681da6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.367148] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1242.367148] env[63297]: value = "task-1697002" [ 1242.367148] env[63297]: _type = "Task" [ 1242.367148] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.382861] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697002, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.391022] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696998, 'name': CloneVM_Task} progress is 95%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.397534] env[63297]: DEBUG oslo_vmware.api [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1696999, 'name': PowerOnVM_Task, 'duration_secs': 1.046484} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.398013] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1242.398329] env[63297]: INFO nova.compute.manager [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Took 11.33 seconds to spawn the instance on the hypervisor. 
[ 1242.398672] env[63297]: DEBUG nova.compute.manager [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1242.399835] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c11038-cf06-4019-bca2-dd6d3721da0c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.403483] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Releasing lock "refresh_cache-f3a579de-1f29-4b67-8dc8-07ea37267001" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1242.403908] env[63297]: DEBUG nova.compute.manager [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Instance network_info: |[{"id": "15254c40-f75e-453c-858a-af927dddc6c7", "address": "fa:16:3e:76:0b:e7", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15254c40-f7", "ovs_interfaceid": "15254c40-f75e-453c-858a-af927dddc6c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1242.404474] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:0b:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15254c40-f75e-453c-858a-af927dddc6c7', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1242.414024] env[63297]: DEBUG oslo.service.loopingcall [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1242.414889] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1242.415256] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf1aac79-7363-4ae1-834c-8eae609cd282 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.448568] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1242.448568] env[63297]: value = "task-1697003" [ 1242.448568] env[63297]: _type = "Task" [ 1242.448568] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.458425] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697003, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.475514] env[63297]: INFO nova.compute.manager [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Took 33.14 seconds to build instance. [ 1242.487206] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.814s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.487453] env[63297]: INFO nova.compute.manager [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Migrating [ 1242.487752] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1242.487910] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "compute-rpcapi-router" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.492030] env[63297]: DEBUG oslo_concurrency.lockutils [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.137s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.492030] env[63297]: DEBUG nova.objects.instance [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lazy-loading 'resources' on Instance uuid 961c3a87-7f53-4764-b8a4-40a408a30f90 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 
1242.496255] env[63297]: DEBUG nova.compute.manager [req-a7249c00-b97f-47e9-91a1-755b6052b6eb req-aa1d694a-801d-46a4-b0ff-3ecff42a7161 service nova] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Received event network-changed-15254c40-f75e-453c-858a-af927dddc6c7 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1242.496947] env[63297]: DEBUG nova.compute.manager [req-a7249c00-b97f-47e9-91a1-755b6052b6eb req-aa1d694a-801d-46a4-b0ff-3ecff42a7161 service nova] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Refreshing instance network info cache due to event network-changed-15254c40-f75e-453c-858a-af927dddc6c7. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1242.496947] env[63297]: DEBUG oslo_concurrency.lockutils [req-a7249c00-b97f-47e9-91a1-755b6052b6eb req-aa1d694a-801d-46a4-b0ff-3ecff42a7161 service nova] Acquiring lock "refresh_cache-f3a579de-1f29-4b67-8dc8-07ea37267001" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1242.496947] env[63297]: DEBUG oslo_concurrency.lockutils [req-a7249c00-b97f-47e9-91a1-755b6052b6eb req-aa1d694a-801d-46a4-b0ff-3ecff42a7161 service nova] Acquired lock "refresh_cache-f3a579de-1f29-4b67-8dc8-07ea37267001" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.497174] env[63297]: DEBUG nova.network.neutron [req-a7249c00-b97f-47e9-91a1-755b6052b6eb req-aa1d694a-801d-46a4-b0ff-3ecff42a7161 service nova] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Refreshing network info cache for port 15254c40-f75e-453c-858a-af927dddc6c7 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1242.877843] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1696998, 'name': CloneVM_Task, 'duration_secs': 1.641056} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.882275] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Created linked-clone VM from snapshot [ 1242.883374] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a58020-d029-4812-bd97-1fce18afddfc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.895176] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Uploading image 0da86208-107b-4969-bdcf-df764837b40e {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1242.897881] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697002, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082883} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.898095] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1242.902040] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c5b820-7857-4183-bf0e-33cbd8a18509 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.927723] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] b14e8466-68ab-4705-a439-6db961a149b0/b14e8466-68ab-4705-a439-6db961a149b0.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1242.930331] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ff8e9eb-9145-4281-bf37-de9c635275f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.965802] env[63297]: DEBUG oslo_vmware.rw_handles [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1242.965802] env[63297]: value = "vm-353788" [ 1242.965802] env[63297]: _type = "VirtualMachine" [ 1242.965802] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1242.966584] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-76602966-2de3-46a9-8923-5e889e2b19f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.969215] env[63297]: INFO nova.compute.manager [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Took 35.18 seconds to build instance. [ 1242.970696] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1242.970696] env[63297]: value = "task-1697004" [ 1242.970696] env[63297]: _type = "Task" [ 1242.970696] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.977949] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8de2bb5f-8fed-4c0f-90df-281e829780e6 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Lock "754e64ec-b6fa-49d8-9de6-ef38918378fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.486s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.978364] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697003, 'name': CreateVM_Task, 'duration_secs': 0.452494} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.980745] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1242.981261] env[63297]: DEBUG oslo_vmware.rw_handles [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lease: (returnval){ [ 1242.981261] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b11032-2824-4925-8676-eaef8f6d6bb5" [ 1242.981261] env[63297]: _type = "HttpNfcLease" [ 1242.981261] env[63297]: } obtained for exporting VM: (result){ [ 1242.981261] env[63297]: value = "vm-353788" [ 1242.981261] env[63297]: _type = "VirtualMachine" [ 1242.981261] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1242.981708] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the lease: (returnval){ [ 1242.981708] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b11032-2824-4925-8676-eaef8f6d6bb5" [ 1242.981708] env[63297]: _type = "HttpNfcLease" [ 1242.981708] env[63297]: } to be ready. 
{{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1242.982450] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1242.982450] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.982613] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1242.986079] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a2730d1-5701-4dee-9fce-e491e562b8cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.992814] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.996849] env[63297]: INFO nova.compute.rpcapi [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 1242.996849] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "compute-rpcapi-router" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1243.011983] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1243.011983] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52533da0-ebe2-7c92-9d45-2a64f0cac372" [ 1243.011983] env[63297]: _type = "Task" [ 1243.011983] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.020372] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1243.020372] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b11032-2824-4925-8676-eaef8f6d6bb5" [ 1243.020372] env[63297]: _type = "HttpNfcLease" [ 1243.020372] env[63297]: } is ready. 
{{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1243.027905] env[63297]: DEBUG oslo_vmware.rw_handles [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1243.027905] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b11032-2824-4925-8676-eaef8f6d6bb5" [ 1243.027905] env[63297]: _type = "HttpNfcLease" [ 1243.027905] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1243.029824] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0138346a-ed56-4f4b-a467-2f48fc33ccc9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.039665] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52533da0-ebe2-7c92-9d45-2a64f0cac372, 'name': SearchDatastore_Task, 'duration_secs': 0.017688} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.045590] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1243.045927] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1243.046225] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1243.046413] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.046606] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
1243.047321] env[63297]: DEBUG oslo_vmware.rw_handles [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cdc592-29bd-2350-1373-69770205c337/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1243.048699] env[63297]: DEBUG oslo_vmware.rw_handles [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cdc592-29bd-2350-1373-69770205c337/disk-0.vmdk for reading. {{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1243.050143] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4af77d09-ebbf-46f6-8ea9-11ed59289642 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.118623] env[63297]: DEBUG nova.compute.manager [req-7d3137f0-88de-4a5d-9e5d-4d3e81cf134a req-e53126bd-8433-4768-ae18-15907b9d3440 service nova] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Received event network-vif-plugged-1c87cf43-e466-4636-a53a-8c75e95f185d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1243.118843] env[63297]: DEBUG oslo_concurrency.lockutils [req-7d3137f0-88de-4a5d-9e5d-4d3e81cf134a req-e53126bd-8433-4768-ae18-15907b9d3440 service nova] Acquiring lock "14b4518e-044a-451a-845d-fa3742e5b3e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.119060] env[63297]: DEBUG oslo_concurrency.lockutils [req-7d3137f0-88de-4a5d-9e5d-4d3e81cf134a req-e53126bd-8433-4768-ae18-15907b9d3440 service nova] Lock "14b4518e-044a-451a-845d-fa3742e5b3e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.119225] env[63297]: DEBUG oslo_concurrency.lockutils [req-7d3137f0-88de-4a5d-9e5d-4d3e81cf134a req-e53126bd-8433-4768-ae18-15907b9d3440 service nova] Lock "14b4518e-044a-451a-845d-fa3742e5b3e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.119388] env[63297]: DEBUG nova.compute.manager [req-7d3137f0-88de-4a5d-9e5d-4d3e81cf134a req-e53126bd-8433-4768-ae18-15907b9d3440 service nova] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] No waiting events found dispatching network-vif-plugged-1c87cf43-e466-4636-a53a-8c75e95f185d {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1243.119548] env[63297]: WARNING nova.compute.manager [req-7d3137f0-88de-4a5d-9e5d-4d3e81cf134a req-e53126bd-8433-4768-ae18-15907b9d3440 service nova] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Received unexpected event network-vif-plugged-1c87cf43-e466-4636-a53a-8c75e95f185d for instance with vm_state building and task_state spawning. 
[ 1243.120263] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1243.122019] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1243.122978] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-809e9a6d-3cda-4528-8275-b3b5027496da {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.131253] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1243.131253] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5206c583-7272-0ea7-d056-c48ee245bcf6" [ 1243.131253] env[63297]: _type = "Task" [ 1243.131253] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.140219] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5206c583-7272-0ea7-d056-c48ee245bcf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.199024] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-32e8ef57-f679-41f5-b869-9ff2430e4911 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.352078] env[63297]: DEBUG nova.network.neutron [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Successfully updated port: 1c87cf43-e466-4636-a53a-8c75e95f185d {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1243.476783] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4afc9d62-b469-4972-bf90-1e465ee4c764 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Lock "459d5a17-182b-4284-b464-57d342981031" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.291s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.489197] env[63297]: DEBUG nova.compute.manager [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1243.491750] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697004, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.527562] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1243.530854] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.530854] env[63297]: DEBUG nova.network.neutron [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1243.650017] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5206c583-7272-0ea7-d056-c48ee245bcf6, 'name': SearchDatastore_Task, 'duration_secs': 0.016044} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.650017] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc8b46f3-c864-4beb-bede-58e9112f3080 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.656891] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1243.656891] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52176a0a-1053-8d0b-98a0-38a291be2071" [ 1243.656891] env[63297]: _type = "Task" [ 1243.656891] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.672652] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52176a0a-1053-8d0b-98a0-38a291be2071, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.683426] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c070703a-3031-494a-a8cf-38db0e43078a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.691667] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbb3c26-97db-491d-b3bb-b17134253e55 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.743404] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1915905-d7a9-4c34-bc40-237cab0413d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.755703] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0dade7-2c3b-4c84-a1b8-89a0171b0b9a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.761972] env[63297]: DEBUG nova.network.neutron [req-a7249c00-b97f-47e9-91a1-755b6052b6eb req-aa1d694a-801d-46a4-b0ff-3ecff42a7161 service nova] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Updated VIF entry in instance network info cache for port 15254c40-f75e-453c-858a-af927dddc6c7. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1243.762583] env[63297]: DEBUG nova.network.neutron [req-a7249c00-b97f-47e9-91a1-755b6052b6eb req-aa1d694a-801d-46a4-b0ff-3ecff42a7161 service nova] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Updating instance_info_cache with network_info: [{"id": "15254c40-f75e-453c-858a-af927dddc6c7", "address": "fa:16:3e:76:0b:e7", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15254c40-f7", "ovs_interfaceid": "15254c40-f75e-453c-858a-af927dddc6c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.777082] env[63297]: DEBUG nova.compute.provider_tree [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1243.856860] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "refresh_cache-14b4518e-044a-451a-845d-fa3742e5b3e2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1243.856936] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "refresh_cache-14b4518e-044a-451a-845d-fa3742e5b3e2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.857216] env[63297]: DEBUG nova.network.neutron [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1243.979793] env[63297]: DEBUG nova.compute.manager [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1243.986515] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697004, 'name': ReconfigVM_Task, 'duration_secs': 0.537911} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.987379] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Reconfigured VM instance instance-00000015 to attach disk [datastore1] b14e8466-68ab-4705-a439-6db961a149b0/b14e8466-68ab-4705-a439-6db961a149b0.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1243.987518] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06591a9a-1a2d-4504-95e4-b0c5028587e7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.995465] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1243.995465] env[63297]: value = "task-1697006" [ 1243.995465] env[63297]: _type = "Task" [ 1243.995465] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.007750] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697006, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.036930] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.170815] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52176a0a-1053-8d0b-98a0-38a291be2071, 'name': SearchDatastore_Task, 'duration_secs': 0.015525} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.172369] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1244.172991] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] f3a579de-1f29-4b67-8dc8-07ea37267001/f3a579de-1f29-4b67-8dc8-07ea37267001.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1244.173884] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff859d6a-a064-4c62-b272-a27aef9c8d48 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.186543] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1244.186543] env[63297]: value = "task-1697007" [ 1244.186543] env[63297]: _type = "Task" [ 1244.186543] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.210278] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697007, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.266136] env[63297]: DEBUG oslo_concurrency.lockutils [req-a7249c00-b97f-47e9-91a1-755b6052b6eb req-aa1d694a-801d-46a4-b0ff-3ecff42a7161 service nova] Releasing lock "refresh_cache-f3a579de-1f29-4b67-8dc8-07ea37267001" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1244.319607] env[63297]: ERROR nova.scheduler.client.report [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [req-876a7277-9510-4022-b989-7a52f81755f7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-876a7277-9510-4022-b989-7a52f81755f7"}]} [ 1244.339264] env[63297]: DEBUG nova.scheduler.client.report [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1244.359749] env[63297]: DEBUG nova.scheduler.client.report [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1244.360142] env[63297]: DEBUG nova.compute.provider_tree [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1244.380412] env[63297]: DEBUG nova.scheduler.client.report [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Refreshing aggregate associations for resource provider 
88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1244.408701] env[63297]: DEBUG nova.scheduler.client.report [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1244.453332] env[63297]: DEBUG nova.network.neutron [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1244.472895] env[63297]: DEBUG nova.network.neutron [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance_info_cache with network_info: [{"id": "3db68b1a-d911-4324-b993-dc755277e56b", "address": "fa:16:3e:c3:7e:ea", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3db68b1a-d9", "ovs_interfaceid": "3db68b1a-d911-4324-b993-dc755277e56b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.515723] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697006, 'name': Rename_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.520861] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.522254] env[63297]: DEBUG nova.compute.manager [None req-445e4d3d-6349-4f16-969a-4de89ca9f105 tempest-ServerExternalEventsTest-479412812 tempest-ServerExternalEventsTest-479412812-project] [instance: 459d5a17-182b-4284-b464-57d342981031] Received event network-changed {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1244.522694] env[63297]: DEBUG nova.compute.manager [None req-445e4d3d-6349-4f16-969a-4de89ca9f105 tempest-ServerExternalEventsTest-479412812 tempest-ServerExternalEventsTest-479412812-project] [instance: 459d5a17-182b-4284-b464-57d342981031] Refreshing instance network info cache due to event network-changed. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1244.524140] env[63297]: DEBUG oslo_concurrency.lockutils [None req-445e4d3d-6349-4f16-969a-4de89ca9f105 tempest-ServerExternalEventsTest-479412812 tempest-ServerExternalEventsTest-479412812-project] Acquiring lock "refresh_cache-459d5a17-182b-4284-b464-57d342981031" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.524140] env[63297]: DEBUG oslo_concurrency.lockutils [None req-445e4d3d-6349-4f16-969a-4de89ca9f105 tempest-ServerExternalEventsTest-479412812 tempest-ServerExternalEventsTest-479412812-project] Acquired lock "refresh_cache-459d5a17-182b-4284-b464-57d342981031" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.524140] env[63297]: DEBUG nova.network.neutron [None req-445e4d3d-6349-4f16-969a-4de89ca9f105 tempest-ServerExternalEventsTest-479412812 tempest-ServerExternalEventsTest-479412812-project] [instance: 459d5a17-182b-4284-b464-57d342981031] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1244.703240] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697007, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.885466] env[63297]: DEBUG nova.network.neutron [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Updating instance_info_cache with network_info: [{"id": "1c87cf43-e466-4636-a53a-8c75e95f185d", "address": "fa:16:3e:bd:b6:08", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c87cf43-e4", "ovs_interfaceid": "1c87cf43-e466-4636-a53a-8c75e95f185d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.980643] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1245.014142] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697006, 'name': Rename_Task, 'duration_secs': 0.756277} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.015420] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1245.016293] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6994a251-35da-4483-bc43-f718397b3f5c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.018878] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c245695-753b-4ba4-b883-84bf4751869a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.033196] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45672ca-e7de-4499-8981-a3d75531e374 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.037162] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1245.037162] env[63297]: value = "task-1697008" [ 1245.037162] env[63297]: _type = "Task" [ 1245.037162] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.072352] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d84edb-e774-4697-9ed8-3c868382789c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.078440] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697008, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.084628] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81679555-93e2-4342-a342-a8b62df2d47b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.099542] env[63297]: DEBUG nova.compute.provider_tree [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1245.197489] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697007, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58274} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.197799] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] f3a579de-1f29-4b67-8dc8-07ea37267001/f3a579de-1f29-4b67-8dc8-07ea37267001.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1245.198106] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1245.198502] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59cfba23-8f13-461e-8783-51004af46dfe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.205629] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1245.205629] env[63297]: value = "task-1697009" [ 1245.205629] env[63297]: _type = "Task" [ 1245.205629] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.214012] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697009, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.388094] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "refresh_cache-14b4518e-044a-451a-845d-fa3742e5b3e2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1245.389042] env[63297]: DEBUG nova.compute.manager [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Instance network_info: |[{"id": "1c87cf43-e466-4636-a53a-8c75e95f185d", "address": "fa:16:3e:bd:b6:08", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c87cf43-e4", "ovs_interfaceid": "1c87cf43-e466-4636-a53a-8c75e95f185d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1245.389409] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:b6:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee4b2432-c393-4e50-ae0e-b5e12bad37db', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c87cf43-e466-4636-a53a-8c75e95f185d', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1245.398744] env[63297]: DEBUG oslo.service.loopingcall [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1245.400073] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1245.400380] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0646705f-72c0-4ad2-80a4-6c75c8be2486 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.426372] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1245.426372] env[63297]: value = "task-1697010" [ 1245.426372] env[63297]: _type = "Task" [ 1245.426372] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.436451] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697010, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.497344] env[63297]: DEBUG nova.compute.manager [req-51062ccc-729c-4246-9e4d-c067bf503a1a req-2c5a1f14-107c-466c-b2fa-3af74c158e40 service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Received event network-changed-ea7a3748-62ea-4bc1-b3ba-0da9fab212c3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1245.497561] env[63297]: DEBUG nova.compute.manager [req-51062ccc-729c-4246-9e4d-c067bf503a1a req-2c5a1f14-107c-466c-b2fa-3af74c158e40 service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Refreshing instance network info cache due to event network-changed-ea7a3748-62ea-4bc1-b3ba-0da9fab212c3. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1245.497770] env[63297]: DEBUG oslo_concurrency.lockutils [req-51062ccc-729c-4246-9e4d-c067bf503a1a req-2c5a1f14-107c-466c-b2fa-3af74c158e40 service nova] Acquiring lock "refresh_cache-754e64ec-b6fa-49d8-9de6-ef38918378fd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1245.497913] env[63297]: DEBUG oslo_concurrency.lockutils [req-51062ccc-729c-4246-9e4d-c067bf503a1a req-2c5a1f14-107c-466c-b2fa-3af74c158e40 service nova] Acquired lock "refresh_cache-754e64ec-b6fa-49d8-9de6-ef38918378fd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.498224] env[63297]: DEBUG nova.network.neutron [req-51062ccc-729c-4246-9e4d-c067bf503a1a req-2c5a1f14-107c-466c-b2fa-3af74c158e40 service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Refreshing network info cache for port ea7a3748-62ea-4bc1-b3ba-0da9fab212c3 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1245.513344] env[63297]: DEBUG nova.network.neutron [None req-445e4d3d-6349-4f16-969a-4de89ca9f105 tempest-ServerExternalEventsTest-479412812 tempest-ServerExternalEventsTest-479412812-project] [instance: 459d5a17-182b-4284-b464-57d342981031] Updating instance_info_cache with network_info: [{"id": "07cc9161-e617-439f-97b5-70331464cd31", "address": "fa:16:3e:2b:c6:74", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07cc9161-e6", "ovs_interfaceid": "07cc9161-e617-439f-97b5-70331464cd31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.551971] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697008, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.653020] env[63297]: DEBUG nova.scheduler.client.report [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 44 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1245.653020] env[63297]: DEBUG nova.compute.provider_tree [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 44 to 45 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1245.653020] env[63297]: DEBUG nova.compute.provider_tree [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1245.720429] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697009, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088169} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.720429] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1245.720429] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a9f51d-53c5-41c3-9745-6b881c12d2f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.755515] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] f3a579de-1f29-4b67-8dc8-07ea37267001/f3a579de-1f29-4b67-8dc8-07ea37267001.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1245.755961] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c31694a9-fe92-4ec2-ae38-832a01e49605 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.780127] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1245.780127] env[63297]: value = "task-1697011" [ 1245.780127] env[63297]: _type = "Task" [ 1245.780127] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.796039] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697011, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.937880] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697010, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.015829] env[63297]: DEBUG oslo_concurrency.lockutils [None req-445e4d3d-6349-4f16-969a-4de89ca9f105 tempest-ServerExternalEventsTest-479412812 tempest-ServerExternalEventsTest-479412812-project] Releasing lock "refresh_cache-459d5a17-182b-4284-b464-57d342981031" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1246.053074] env[63297]: DEBUG oslo_vmware.api [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697008, 'name': PowerOnVM_Task, 'duration_secs': 0.737318} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.053074] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1246.053074] env[63297]: INFO nova.compute.manager [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Took 9.83 seconds to spawn the instance on the hypervisor. [ 1246.053295] env[63297]: DEBUG nova.compute.manager [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1246.054086] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12909598-680e-447a-bb87-b60f3ecba921 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.158477] env[63297]: DEBUG oslo_concurrency.lockutils [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.669s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.162934] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.561s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.164570] env[63297]: INFO nova.compute.claims [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1246.173200] env[63297]: DEBUG oslo_vmware.rw_handles [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255dec0-3fa2-9762-c115-33a888528364/disk-0.vmdk. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1246.174632] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e06269-83a4-4a8c-aaae-9ac098674663 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.185831] env[63297]: DEBUG oslo_vmware.rw_handles [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255dec0-3fa2-9762-c115-33a888528364/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1246.186074] env[63297]: ERROR oslo_vmware.rw_handles [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255dec0-3fa2-9762-c115-33a888528364/disk-0.vmdk due to incomplete transfer. [ 1246.186320] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-78df1ac5-2588-433f-8cfd-0bd2a2c68946 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.192630] env[63297]: INFO nova.scheduler.client.report [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Deleted allocations for instance 961c3a87-7f53-4764-b8a4-40a408a30f90 [ 1246.197867] env[63297]: DEBUG oslo_vmware.rw_handles [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255dec0-3fa2-9762-c115-33a888528364/disk-0.vmdk. 
{{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1246.198093] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Uploaded image 136bd841-b3df-4cec-b1d6-05f6102c5ab8 to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1246.204148] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1246.204148] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-00d954a5-ce1b-498f-9c7c-7a8a09048067 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.210580] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1246.210580] env[63297]: value = "task-1697012" [ 1246.210580] env[63297]: _type = "Task" [ 1246.210580] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.225478] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697012, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.292478] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697011, 'name': ReconfigVM_Task, 'duration_secs': 0.422609} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.292478] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Reconfigured VM instance instance-00000016 to attach disk [datastore1] f3a579de-1f29-4b67-8dc8-07ea37267001/f3a579de-1f29-4b67-8dc8-07ea37267001.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1246.295018] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a00f9b2-380f-4ae3-8fd6-ed65cf5ffa82 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.304823] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1246.304823] env[63297]: value = "task-1697013" [ 1246.304823] env[63297]: _type = "Task" [ 1246.304823] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.313120] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697013, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.321020] env[63297]: DEBUG nova.network.neutron [req-51062ccc-729c-4246-9e4d-c067bf503a1a req-2c5a1f14-107c-466c-b2fa-3af74c158e40 service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Updated VIF entry in instance network info cache for port ea7a3748-62ea-4bc1-b3ba-0da9fab212c3. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1246.321020] env[63297]: DEBUG nova.network.neutron [req-51062ccc-729c-4246-9e4d-c067bf503a1a req-2c5a1f14-107c-466c-b2fa-3af74c158e40 service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Updating instance_info_cache with network_info: [{"id": "ea7a3748-62ea-4bc1-b3ba-0da9fab212c3", "address": "fa:16:3e:53:81:f5", "network": {"id": "9301f145-9740-4669-9a28-38fb263d89d2", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1155086179-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e7e471b2638428080e76e0d2b740da4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea7a3748-62", "ovs_interfaceid": "ea7a3748-62ea-4bc1-b3ba-0da9fab212c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.438138] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697010, 'name': CreateVM_Task, 'duration_secs': 0.597901} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.438358] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1246.439063] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.439270] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.439847] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1246.440124] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5291b6f-2e24-4abe-baf5-9845fae48cb6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.446200] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1246.446200] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e1ece6-1397-ce09-b5ff-28e192fdd04c" [ 1246.446200] env[63297]: _type = "Task" [ 1246.446200] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.455486] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e1ece6-1397-ce09-b5ff-28e192fdd04c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.506284] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5df234-2e72-465a-b4e2-796543f2622a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.548313] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance '9b1306f9-4b0a-4116-8e79-271478f33490' progress to 0 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1246.572073] env[63297]: INFO nova.compute.manager [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Took 36.37 seconds to build instance. [ 1246.605935] env[63297]: DEBUG nova.compute.manager [req-7afe60c4-a578-463d-9a90-6063daee9dfc req-229d20f7-66bc-4e2a-b406-424140d253e2 service nova] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Received event network-changed-1c87cf43-e466-4636-a53a-8c75e95f185d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1246.606150] env[63297]: DEBUG nova.compute.manager [req-7afe60c4-a578-463d-9a90-6063daee9dfc req-229d20f7-66bc-4e2a-b406-424140d253e2 service nova] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Refreshing instance network info cache due to event network-changed-1c87cf43-e466-4636-a53a-8c75e95f185d. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1246.606382] env[63297]: DEBUG oslo_concurrency.lockutils [req-7afe60c4-a578-463d-9a90-6063daee9dfc req-229d20f7-66bc-4e2a-b406-424140d253e2 service nova] Acquiring lock "refresh_cache-14b4518e-044a-451a-845d-fa3742e5b3e2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.606514] env[63297]: DEBUG oslo_concurrency.lockutils [req-7afe60c4-a578-463d-9a90-6063daee9dfc req-229d20f7-66bc-4e2a-b406-424140d253e2 service nova] Acquired lock "refresh_cache-14b4518e-044a-451a-845d-fa3742e5b3e2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.606691] env[63297]: DEBUG nova.network.neutron [req-7afe60c4-a578-463d-9a90-6063daee9dfc req-229d20f7-66bc-4e2a-b406-424140d253e2 service nova] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Refreshing network info cache for port 1c87cf43-e466-4636-a53a-8c75e95f185d {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1246.705809] env[63297]: DEBUG oslo_concurrency.lockutils [None req-28cce3d4-66a6-4908-b3a6-ceeed4782e6e tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "961c3a87-7f53-4764-b8a4-40a408a30f90" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.559s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.728062] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697012, 'name': Destroy_Task, 'duration_secs': 
0.37569} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.728231] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Destroyed the VM [ 1246.728584] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1246.728854] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-186b72e9-c3dc-4fce-9ed7-987dfb5aefd3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.737753] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1246.737753] env[63297]: value = "task-1697014" [ 1246.737753] env[63297]: _type = "Task" [ 1246.737753] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.758265] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697014, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.771332] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Acquiring lock "459d5a17-182b-4284-b464-57d342981031" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.771504] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Lock "459d5a17-182b-4284-b464-57d342981031" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.771777] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Acquiring lock "459d5a17-182b-4284-b464-57d342981031-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.772065] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Lock "459d5a17-182b-4284-b464-57d342981031-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.772301] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Lock "459d5a17-182b-4284-b464-57d342981031-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.774605] env[63297]: INFO nova.compute.manager [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Terminating instance [ 1246.776642] env[63297]: DEBUG nova.compute.manager [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1246.776810] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1246.777668] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4984b428-2dee-4580-8bee-0bdc1f47f0f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.787832] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1246.788116] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-863a2351-b8af-4f5c-9758-7b6ddcbc5ee8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.798452] env[63297]: DEBUG oslo_vmware.api [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Waiting for the task: (returnval){ [ 1246.798452] env[63297]: value = "task-1697015" [ 1246.798452] env[63297]: _type = "Task" [ 1246.798452] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.807191] env[63297]: DEBUG oslo_vmware.api [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1697015, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.817852] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697013, 'name': Rename_Task, 'duration_secs': 0.193316} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.818220] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1246.818490] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de8d6160-e078-424a-8733-cafb1b0fba43 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.822378] env[63297]: DEBUG oslo_concurrency.lockutils [req-51062ccc-729c-4246-9e4d-c067bf503a1a req-2c5a1f14-107c-466c-b2fa-3af74c158e40 service nova] Releasing lock "refresh_cache-754e64ec-b6fa-49d8-9de6-ef38918378fd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1246.828816] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1246.828816] env[63297]: value = "task-1697017" [ 1246.828816] env[63297]: _type = "Task" [ 1246.828816] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.842069] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697017, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.959517] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e1ece6-1397-ce09-b5ff-28e192fdd04c, 'name': SearchDatastore_Task, 'duration_secs': 0.02871} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.959896] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1246.960158] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1246.960419] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.960607] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.960753] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1246.961080] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84fc2af1-664b-442a-9b7a-79d359087ffb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.975502] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1246.979736] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1246.980771] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa5f7357-40fb-4571-abf4-1b588d409836 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.989397] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1246.989397] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5261d739-0254-b2de-6954-853ee96b44a9" [ 1246.989397] env[63297]: _type = "Task" [ 1246.989397] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.001492] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5261d739-0254-b2de-6954-853ee96b44a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.053871] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1247.054279] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f0bdf70-ba3c-49be-9776-31c293eac8f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.066045] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1247.066045] env[63297]: value = "task-1697021" [ 1247.066045] env[63297]: _type = "Task" [ 1247.066045] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.076964] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c8483841-f05f-4000-8bb1-ebcf8aedc67d tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "b14e8466-68ab-4705-a439-6db961a149b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.664s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.077403] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697021, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.254105] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697014, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.313182] env[63297]: DEBUG oslo_vmware.api [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1697015, 'name': PowerOffVM_Task, 'duration_secs': 0.319665} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.313514] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1247.314988] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1247.314988] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cbbe7d15-098b-4ad4-97b2-7b849f45ba28 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.346530] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697017, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.393018] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.393420] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.424625] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1247.424851] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1247.425041] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Deleting the datastore file [datastore1] 459d5a17-182b-4284-b464-57d342981031 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1247.425313] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7c1e654-5d52-4290-840b-1405ea5f308a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.446316] env[63297]: DEBUG oslo_vmware.api [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Waiting for the task: (returnval){ [ 1247.446316] env[63297]: value = "task-1697023" [ 1247.446316] env[63297]: _type = "Task" [ 1247.446316] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.458658] env[63297]: DEBUG oslo_vmware.api [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1697023, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.507022] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5261d739-0254-b2de-6954-853ee96b44a9, 'name': SearchDatastore_Task, 'duration_secs': 0.015352} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.507971] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab3ca07d-3b98-4497-a514-2ac3bc67a92f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.514812] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1247.514812] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e69e24-0019-291c-4ab8-d84cb54aa89e" [ 1247.514812] env[63297]: _type = "Task" [ 1247.514812] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.530118] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e69e24-0019-291c-4ab8-d84cb54aa89e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.582884] env[63297]: DEBUG nova.compute.manager [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1247.586609] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697021, 'name': PowerOffVM_Task, 'duration_secs': 0.240492} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.590169] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1247.590169] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance '9b1306f9-4b0a-4116-8e79-271478f33490' progress to 17 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1247.755334] env[63297]: DEBUG oslo_vmware.api [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697014, 'name': RemoveSnapshot_Task, 'duration_secs': 0.840328} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.758630] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1247.758884] env[63297]: INFO nova.compute.manager [None req-7a3bdffb-9036-411c-b040-c4e32207cf94 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Took 14.04 seconds to snapshot the instance on the hypervisor. [ 1247.793346] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33786d5-1c13-4c27-9c8a-2aeabb6813ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.798185] env[63297]: DEBUG nova.network.neutron [req-7afe60c4-a578-463d-9a90-6063daee9dfc req-229d20f7-66bc-4e2a-b406-424140d253e2 service nova] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Updated VIF entry in instance network info cache for port 1c87cf43-e466-4636-a53a-8c75e95f185d. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1247.799382] env[63297]: DEBUG nova.network.neutron [req-7afe60c4-a578-463d-9a90-6063daee9dfc req-229d20f7-66bc-4e2a-b406-424140d253e2 service nova] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Updating instance_info_cache with network_info: [{"id": "1c87cf43-e466-4636-a53a-8c75e95f185d", "address": "fa:16:3e:bd:b6:08", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c87cf43-e4", "ovs_interfaceid": "1c87cf43-e466-4636-a53a-8c75e95f185d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.804392] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c017d04-b10d-4c04-a381-20dcd701395c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.845738] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0309736-c43d-43bc-8d6d-cb6e034aa628 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.857920] env[63297]: DEBUG oslo_vmware.api [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697017, 'name': PowerOnVM_Task, 'duration_secs': 0.646256} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.859380] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911a4da4-7307-4dcc-bb6f-a43a3bca60ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.863595] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1247.863869] env[63297]: INFO nova.compute.manager [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Took 8.95 seconds to spawn the instance on the hypervisor. 
[ 1247.864069] env[63297]: DEBUG nova.compute.manager [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1247.864859] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06da135b-2816-4acd-b6e1-75055537dc24 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.881419] env[63297]: DEBUG nova.compute.provider_tree [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1247.961190] env[63297]: DEBUG oslo_vmware.api [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Task: {'id': task-1697023, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2673} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.961190] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1247.961190] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1247.961190] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1247.961190] env[63297]: INFO nova.compute.manager [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] [instance: 459d5a17-182b-4284-b464-57d342981031] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1247.962027] env[63297]: DEBUG oslo.service.loopingcall [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1247.962027] env[63297]: DEBUG nova.compute.manager [-] [instance: 459d5a17-182b-4284-b464-57d342981031] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1247.962027] env[63297]: DEBUG nova.network.neutron [-] [instance: 459d5a17-182b-4284-b464-57d342981031] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1248.026483] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e69e24-0019-291c-4ab8-d84cb54aa89e, 'name': SearchDatastore_Task, 'duration_secs': 0.015713} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.026743] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.027041] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 14b4518e-044a-451a-845d-fa3742e5b3e2/14b4518e-044a-451a-845d-fa3742e5b3e2.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1248.027268] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc36e259-6579-4070-bf8f-9a16347d7597 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.036810] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1248.036810] env[63297]: value = "task-1697024" [ 1248.036810] env[63297]: _type = "Task" [ 1248.036810] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.048304] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697024, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.057191] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Acquiring lock "8bc4bb67-bc00-44c6-9c83-c0a1072142e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.057541] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Lock "8bc4bb67-bc00-44c6-9c83-c0a1072142e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.097745] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1248.098108] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1248.098175] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1248.098311] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1248.098669] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1248.098669] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1248.098836] 
env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1248.099022] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1248.099229] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1248.099834] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1248.099834] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1248.105650] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e9ed04f-f37f-41f4-b0e4-7f67871ae1f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.117992] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.126911] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1248.126911] env[63297]: value = "task-1697025" [ 1248.126911] env[63297]: _type = "Task" [ 1248.126911] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.139085] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697025, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.304873] env[63297]: DEBUG oslo_concurrency.lockutils [req-7afe60c4-a578-463d-9a90-6063daee9dfc req-229d20f7-66bc-4e2a-b406-424140d253e2 service nova] Releasing lock "refresh_cache-14b4518e-044a-451a-845d-fa3742e5b3e2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.399225] env[63297]: INFO nova.compute.manager [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Took 36.82 seconds to build instance. [ 1248.410806] env[63297]: ERROR nova.scheduler.client.report [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [req-26d51173-90be-44d5-ad2d-09c75f367397] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-26d51173-90be-44d5-ad2d-09c75f367397"}]} [ 1248.429903] env[63297]: DEBUG nova.scheduler.client.report [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1248.458640] env[63297]: DEBUG nova.scheduler.client.report [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1248.458970] env[63297]: DEBUG nova.compute.provider_tree [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1248.476335] env[63297]: DEBUG nova.scheduler.client.report [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1248.500684] env[63297]: DEBUG nova.scheduler.client.report [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1248.553709] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697024, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.640936] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697025, 'name': ReconfigVM_Task, 'duration_secs': 0.397442} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.644900] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance '9b1306f9-4b0a-4116-8e79-271478f33490' progress to 33 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1248.755337] env[63297]: DEBUG nova.compute.manager [req-ee2d6d94-5840-4667-a8e9-34d9de6f1a95 req-bec11683-4ffb-4d25-a0e7-3c625554c493 service nova] [instance: 459d5a17-182b-4284-b464-57d342981031] Received event network-vif-deleted-07cc9161-e617-439f-97b5-70331464cd31 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1248.755539] env[63297]: INFO nova.compute.manager [req-ee2d6d94-5840-4667-a8e9-34d9de6f1a95 req-bec11683-4ffb-4d25-a0e7-3c625554c493 service nova] [instance: 459d5a17-182b-4284-b464-57d342981031] Neutron deleted interface 07cc9161-e617-439f-97b5-70331464cd31; detaching it from the instance and deleting it from the info cache [ 1248.755712] env[63297]: DEBUG nova.network.neutron [req-ee2d6d94-5840-4667-a8e9-34d9de6f1a95 req-bec11683-4ffb-4d25-a0e7-3c625554c493 service nova] [instance: 459d5a17-182b-4284-b464-57d342981031] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.787733] env[63297]: DEBUG nova.network.neutron [-] [instance: 459d5a17-182b-4284-b464-57d342981031] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.901713] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adb6b6af-ecfd-402e-9162-8f52a8441915 
tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "f3a579de-1f29-4b67-8dc8-07ea37267001" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.214s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.949909] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e95fc91-5085-434e-a219-4a2d7243f110 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.962106] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9076c535-875a-41f8-bacb-657990756985 tempest-ServersAdminNegativeTestJSON-1288409101 tempest-ServersAdminNegativeTestJSON-1288409101-project-admin] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Suspending the VM {{(pid=63297) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1248.962383] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c0d889ad-e328-4edb-9714-f57f59898c15 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.972443] env[63297]: DEBUG oslo_vmware.api [None req-9076c535-875a-41f8-bacb-657990756985 tempest-ServersAdminNegativeTestJSON-1288409101 tempest-ServersAdminNegativeTestJSON-1288409101-project-admin] Waiting for the task: (returnval){ [ 1248.972443] env[63297]: value = "task-1697026" [ 1248.972443] env[63297]: _type = "Task" [ 1248.972443] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.982722] env[63297]: DEBUG oslo_vmware.api [None req-9076c535-875a-41f8-bacb-657990756985 tempest-ServersAdminNegativeTestJSON-1288409101 tempest-ServersAdminNegativeTestJSON-1288409101-project-admin] Task: {'id': task-1697026, 'name': SuspendVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.048743] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.791836} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.048853] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 14b4518e-044a-451a-845d-fa3742e5b3e2/14b4518e-044a-451a-845d-fa3742e5b3e2.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1249.049167] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1249.049624] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b28c1004-3c94-4401-a7d6-f9816f9e5e84 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.061373] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1249.061373] env[63297]: value = "task-1697027" [ 1249.061373] env[63297]: _type = "Task" [ 1249.061373] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.068063] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76cf73a-557c-4c1a-9386-14155fc89153 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.072961] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697027, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.079386] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b7dbc3-9562-4720-b63b-bbfa0335afe7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.111695] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea129043-8464-4e28-8a07-b1f621df1329 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.121723] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51131fe5-4e1c-4760-a34c-2717ed2172cb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.137018] env[63297]: DEBUG nova.compute.provider_tree [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1249.153527] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1249.154031] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1249.154031] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1249.154319] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1249.154319] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1249.154623] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 
tempest-MigrationsAdminTest-697190469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1249.154623] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1249.154766] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1249.155289] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1249.155289] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1249.155395] env[63297]: DEBUG nova.virt.hardware [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1249.161401] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Reconfiguring VM instance instance-0000000b to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1249.161952] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47f9e5fe-f004-43e6-b6b3-3b7a2877445c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.184282] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1249.184282] env[63297]: value = "task-1697028" [ 1249.184282] env[63297]: _type = "Task" [ 1249.184282] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.196849] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697028, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.260158] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f605938d-2d71-4d59-a77a-9310589508d4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.279196] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cae4c03-5083-44b5-af8a-1d277f4aa10c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.296381] env[63297]: INFO nova.compute.manager [-] [instance: 459d5a17-182b-4284-b464-57d342981031] Took 1.33 seconds to deallocate network for instance. [ 1249.331597] env[63297]: DEBUG nova.compute.manager [req-ee2d6d94-5840-4667-a8e9-34d9de6f1a95 req-bec11683-4ffb-4d25-a0e7-3c625554c493 service nova] [instance: 459d5a17-182b-4284-b464-57d342981031] Detach interface failed, port_id=07cc9161-e617-439f-97b5-70331464cd31, reason: Instance 459d5a17-182b-4284-b464-57d342981031 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1249.405746] env[63297]: DEBUG nova.compute.manager [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1249.486760] env[63297]: DEBUG oslo_vmware.api [None req-9076c535-875a-41f8-bacb-657990756985 tempest-ServersAdminNegativeTestJSON-1288409101 tempest-ServersAdminNegativeTestJSON-1288409101-project-admin] Task: {'id': task-1697026, 'name': SuspendVM_Task} progress is 54%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.574333] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697027, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.150795} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.574680] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1249.575566] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174926eb-3f52-4630-8446-4cb88cdfd18d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.602843] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 14b4518e-044a-451a-845d-fa3742e5b3e2/14b4518e-044a-451a-845d-fa3742e5b3e2.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1249.603232] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6174a4e6-ba8f-4232-bbe6-b1a6a69b85d4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.628356] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1249.628356] env[63297]: value = "task-1697029" [ 1249.628356] env[63297]: _type = "Task" [ 1249.628356] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.639511] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697029, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.644305] env[63297]: DEBUG nova.scheduler.client.report [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1249.696773] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697028, 'name': ReconfigVM_Task, 'duration_secs': 0.380669} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.697131] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Reconfigured VM instance instance-0000000b to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1249.698294] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ad96a1-a740-442c-9b89-b581f6c77265 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.724406] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 9b1306f9-4b0a-4116-8e79-271478f33490/9b1306f9-4b0a-4116-8e79-271478f33490.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1249.725082] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ebf84d5-a39a-428f-a7ae-55ad0e031eac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.748326] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1249.748326] env[63297]: value = "task-1697030" [ 1249.748326] env[63297]: _type = "Task" [ 1249.748326] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.758651] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697030, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.804398] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.936178] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.985099] env[63297]: DEBUG oslo_vmware.api [None req-9076c535-875a-41f8-bacb-657990756985 tempest-ServersAdminNegativeTestJSON-1288409101 tempest-ServersAdminNegativeTestJSON-1288409101-project-admin] Task: {'id': task-1697026, 'name': SuspendVM_Task, 'duration_secs': 0.714725} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.985510] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9076c535-875a-41f8-bacb-657990756985 tempest-ServersAdminNegativeTestJSON-1288409101 tempest-ServersAdminNegativeTestJSON-1288409101-project-admin] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Suspended the VM {{(pid=63297) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1249.985953] env[63297]: DEBUG nova.compute.manager [None req-9076c535-875a-41f8-bacb-657990756985 tempest-ServersAdminNegativeTestJSON-1288409101 tempest-ServersAdminNegativeTestJSON-1288409101-project-admin] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1249.987225] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa39be9-038f-45d2-a088-0ba08c7f175e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.139469] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697029, 'name': ReconfigVM_Task, 'duration_secs': 0.383695} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.139740] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 14b4518e-044a-451a-845d-fa3742e5b3e2/14b4518e-044a-451a-845d-fa3742e5b3e2.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1250.140607] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3df26c7e-bfbc-4b04-bad1-d426fa90fdfa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.147241] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1250.147241] env[63297]: value = "task-1697032" [ 1250.147241] env[63297]: _type = "Task" [ 1250.147241] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.151340] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.988s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.151892] env[63297]: DEBUG nova.compute.manager [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1250.154846] env[63297]: DEBUG oslo_concurrency.lockutils [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.146s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.154846] env[63297]: DEBUG nova.objects.instance [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63297) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1250.164630] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697032, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.259213] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697030, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.274710] env[63297]: DEBUG nova.compute.manager [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1250.275712] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc10eebb-0533-45c2-87e9-bca1bcf9d1d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.659955] env[63297]: DEBUG nova.compute.utils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1250.664665] env[63297]: DEBUG nova.compute.manager [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Not allocating networking since 'none' was specified. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1250.671680] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697032, 'name': Rename_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.683764] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.684012] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.759784] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697030, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.787312] env[63297]: INFO nova.compute.manager [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] instance snapshotting [ 1250.790267] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc404f6-b54e-42a8-9fec-43ba5221c56b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.810644] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bf7140-886a-475b-aa78-8675240d980b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.162018] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697032, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.165874] env[63297]: DEBUG nova.compute.manager [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1251.170041] env[63297]: DEBUG oslo_concurrency.lockutils [None req-af2923dc-1779-496a-b647-47c59f5e7510 tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1251.171703] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.241s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1251.173218] env[63297]: INFO nova.compute.claims [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1251.260873] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697030, 'name': ReconfigVM_Task, 'duration_secs': 1.356256} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.261208] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 9b1306f9-4b0a-4116-8e79-271478f33490/9b1306f9-4b0a-4116-8e79-271478f33490.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1251.261563] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance '9b1306f9-4b0a-4116-8e79-271478f33490' progress to 50 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1251.322793] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1251.323359] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9a576c41-650e-4b5e-bcac-aec79e88edc3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.332580] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1251.332580] env[63297]: value = "task-1697033" [ 1251.332580] env[63297]: _type 
= "Task" [ 1251.332580] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.341690] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697033, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.494011] env[63297]: DEBUG oslo_concurrency.lockutils [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "f3a579de-1f29-4b67-8dc8-07ea37267001" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1251.494351] env[63297]: DEBUG oslo_concurrency.lockutils [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "f3a579de-1f29-4b67-8dc8-07ea37267001" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1251.494593] env[63297]: DEBUG oslo_concurrency.lockutils [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "f3a579de-1f29-4b67-8dc8-07ea37267001-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1251.494785] env[63297]: DEBUG oslo_concurrency.lockutils [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "f3a579de-1f29-4b67-8dc8-07ea37267001-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1251.495043] env[63297]: DEBUG oslo_concurrency.lockutils [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "f3a579de-1f29-4b67-8dc8-07ea37267001-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1251.497142] env[63297]: INFO nova.compute.manager [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Terminating instance [ 1251.499302] env[63297]: DEBUG nova.compute.manager [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1251.499498] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1251.500370] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ee06b7-6a1f-4f79-90b1-2cbd1a0461aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.508970] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1251.509261] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2ca7171-d73d-4ab0-96a4-8598f2c182ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.597794] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1251.598017] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1251.598220] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Deleting the datastore file [datastore1] f3a579de-1f29-4b67-8dc8-07ea37267001 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1251.598486] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a420428-5432-43a7-a2dd-6b2cd1b542af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.606799] env[63297]: DEBUG oslo_vmware.api [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1251.606799] env[63297]: value = "task-1697035" [ 1251.606799] env[63297]: _type = "Task" [ 1251.606799] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.616282] env[63297]: DEBUG oslo_vmware.api [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697035, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.662577] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697032, 'name': Rename_Task, 'duration_secs': 1.051688} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.662962] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1251.663231] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb700c9d-3428-45c7-b217-e00483e7ca80 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.674573] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1251.674573] env[63297]: value = "task-1697036" [ 1251.674573] env[63297]: _type = "Task" [ 1251.674573] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.688454] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.769112] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad13f3c-5a14-48c6-bc9d-0d0057c74796 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.790176] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70087455-13b1-46d7-973c-ae97ee557f66 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.810205] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance '9b1306f9-4b0a-4116-8e79-271478f33490' progress to 67 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1251.843315] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697033, 'name': CreateSnapshot_Task, 'duration_secs': 0.49517} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.843706] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1251.844657] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5ff1c4-0bfe-4fc5-bc54-240d5d0f7597 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.121592] env[63297]: DEBUG oslo_vmware.api [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697035, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16409} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.121878] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1252.122055] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1252.122238] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1252.122417] env[63297]: INFO nova.compute.manager [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1252.122667] env[63297]: DEBUG oslo.service.loopingcall [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1252.122864] env[63297]: DEBUG nova.compute.manager [-] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1252.122958] env[63297]: DEBUG nova.network.neutron [-] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1252.177034] env[63297]: DEBUG nova.compute.manager [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1252.192448] env[63297]: DEBUG oslo_vmware.api [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697036, 'name': PowerOnVM_Task, 'duration_secs': 0.479334} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.192824] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1252.193106] env[63297]: INFO nova.compute.manager [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Took 10.98 seconds to spawn the instance on the hypervisor. 
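The Rename_Task / PowerOnVM_Task sequence above follows the usual oslo.vmware invoke-and-poll pattern: the driver resolves the VM's managed object reference, starts an asynchronous vCenter task, and wait_for_task() then polls TaskInfo, which is what produces the "Waiting for the task ... progress is N%" lines throughout this log. The following is only a minimal sketch of that pattern; the connection values are placeholders (not the vCenter endpoint used in this run), the instance UUID is copied from the log record above, and everything else is illustrative rather than Nova's actual driver code.

    from oslo_vmware import api as vmware_api

    # Placeholder credentials/endpoint, for illustration only.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Resolve the VM managed object by Nova instance UUID
    # (SearchIndex.FindAllByUuid, as also invoked later in this log).
    search_index = session.vim.service_content.searchIndex
    vm_refs = session.invoke_api(session.vim, 'FindAllByUuid', search_index,
                                 uuid='14b4518e-044a-451a-845d-fa3742e5b3e2',
                                 vmSearch=True, instanceUuid=True)

    # Start the asynchronous power-on task and block until it completes;
    # wait_for_task() polling is what emits the "_poll_task ... progress" lines.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_refs[0])
    session.wait_for_task(task)
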
[ 1252.193357] env[63297]: DEBUG nova.compute.manager [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1252.194352] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd26ce20-aa04-4b2c-a16f-498d05f5f326 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.221353] env[63297]: DEBUG nova.virt.hardware [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1252.221836] env[63297]: DEBUG nova.virt.hardware [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1252.222148] env[63297]: DEBUG nova.virt.hardware [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1252.222388] env[63297]: DEBUG nova.virt.hardware [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1252.222600] env[63297]: DEBUG nova.virt.hardware [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1252.222828] env[63297]: DEBUG nova.virt.hardware [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1252.224158] env[63297]: DEBUG nova.virt.hardware [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1252.224400] env[63297]: DEBUG nova.virt.hardware [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1252.224599] env[63297]: DEBUG nova.virt.hardware [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1252.224775] env[63297]: DEBUG nova.virt.hardware [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1252.224959] env[63297]: DEBUG nova.virt.hardware [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1252.225913] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0427605-27d9-462a-9ae6-ad69fa6ae155 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.238935] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8b896f-b02e-44c9-be00-182d8b8aa37e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.256190] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Instance VIF info [] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1252.262738] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Creating folder: Project (933aa128e22a414eaf54d2b07b06b047). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1252.271079] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-406c59ae-055e-4b18-adeb-72dc26d612bb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.286123] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Created folder: Project (933aa128e22a414eaf54d2b07b06b047) in parent group-v353718. [ 1252.286376] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Creating folder: Instances. 
Parent ref: group-v353795. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1252.286644] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-912ad8a1-646a-4342-828e-8166aba96431 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.298728] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Created folder: Instances in parent group-v353795. [ 1252.299013] env[63297]: DEBUG oslo.service.loopingcall [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1252.302218] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1252.304074] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb732095-8ade-4dbb-9628-345dab95c764 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.326650] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1252.326650] env[63297]: value = "task-1697040" [ 1252.326650] env[63297]: _type = "Task" [ 1252.326650] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.336210] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697040, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.363968] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1252.367110] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6bee9337-fd5a-448d-b003-3ef12e708895 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.378464] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1252.378464] env[63297]: value = "task-1697041" [ 1252.378464] env[63297]: _type = "Task" [ 1252.378464] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.387735] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697041, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.447666] env[63297]: DEBUG nova.network.neutron [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Port 3db68b1a-d911-4324-b993-dc755277e56b binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1252.527788] env[63297]: DEBUG nova.compute.manager [req-c4cdc70f-306d-428d-bc91-e365a0fe9c96 req-b9a4071c-4045-473e-8b00-2eee85ecab04 service nova] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Received event network-vif-deleted-15254c40-f75e-453c-858a-af927dddc6c7 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1252.528010] env[63297]: INFO nova.compute.manager [req-c4cdc70f-306d-428d-bc91-e365a0fe9c96 req-b9a4071c-4045-473e-8b00-2eee85ecab04 service nova] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Neutron deleted interface 15254c40-f75e-453c-858a-af927dddc6c7; detaching it from the instance and deleting it from the info cache [ 1252.528259] env[63297]: DEBUG nova.network.neutron [req-c4cdc70f-306d-428d-bc91-e365a0fe9c96 req-b9a4071c-4045-473e-8b00-2eee85ecab04 service nova] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.718670] env[63297]: INFO nova.compute.manager [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Took 40.05 seconds to build instance. [ 1252.767412] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8ab863-6630-48f4-957b-733e6a19a228 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.777878] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324eeccc-edfb-43e1-8e72-861f04c9bfce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.811046] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839757c9-a4b9-4fa8-91fc-1fde85285483 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.820457] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d535ab12-9f3b-4fd1-8731-8617e86e9633 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.838769] env[63297]: DEBUG nova.compute.provider_tree [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1252.846974] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697040, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.892606] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697041, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.955503] env[63297]: DEBUG nova.network.neutron [-] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.031164] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b014598-b90f-48dc-81e5-c4e022a7808f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.042919] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0683135-822f-4b0c-8778-ff255d02cc03 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.082551] env[63297]: DEBUG nova.compute.manager [req-c4cdc70f-306d-428d-bc91-e365a0fe9c96 req-b9a4071c-4045-473e-8b00-2eee85ecab04 service nova] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Detach interface failed, port_id=15254c40-f75e-453c-858a-af927dddc6c7, reason: Instance f3a579de-1f29-4b67-8dc8-07ea37267001 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1253.221980] env[63297]: DEBUG oslo_concurrency.lockutils [None req-70294515-60e9-452e-9e22-2ba70401d3df tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "14b4518e-044a-451a-845d-fa3742e5b3e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.566s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.344674] env[63297]: DEBUG nova.scheduler.client.report [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1253.349763] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697040, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.392405] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697041, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.476665] env[63297]: INFO nova.compute.manager [-] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Took 1.35 seconds to deallocate network for instance. [ 1253.484381] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "9b1306f9-4b0a-4116-8e79-271478f33490-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1253.484662] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "9b1306f9-4b0a-4116-8e79-271478f33490-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.484838] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "9b1306f9-4b0a-4116-8e79-271478f33490-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.725327] env[63297]: DEBUG nova.compute.manager [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1253.841090] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697040, 'name': CreateVM_Task, 'duration_secs': 1.296711} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.841374] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1253.841913] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1253.842627] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.842627] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1253.842920] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8831c315-34c2-42fb-81de-2d4053e31196 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.850054] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Waiting for the task: (returnval){ [ 1253.850054] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520a96db-2330-2d62-42fd-a09dedf4948d" [ 1253.850054] env[63297]: _type = "Task" [ 1253.850054] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.850950] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.679s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.851505] env[63297]: DEBUG nova.compute.manager [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1253.862196] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.352s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.862196] env[63297]: DEBUG nova.objects.instance [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lazy-loading 'resources' on Instance uuid ef3346b1-ce09-4616-bdf4-200ea31efd01 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1253.874016] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520a96db-2330-2d62-42fd-a09dedf4948d, 'name': SearchDatastore_Task, 'duration_secs': 0.016886} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.875556] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1253.875818] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1253.876306] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1253.876565] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.876887] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1253.877717] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-842cd226-9125-492c-bbd7-445af25b0678 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.894587] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697041, 'name': CloneVM_Task} progress is 95%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.896810] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1253.897016] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1253.898189] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-410bea7c-ae2f-474c-845a-661ca8690b23 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.904651] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Waiting for the task: (returnval){ [ 1253.904651] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5244d236-521b-8dff-3aa7-e91b6cd0a9df" [ 1253.904651] env[63297]: _type = "Task" [ 1253.904651] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.919012] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5244d236-521b-8dff-3aa7-e91b6cd0a9df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.996177] env[63297]: DEBUG oslo_concurrency.lockutils [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.123124] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "8fa5fef6-8768-4e24-aab3-db56a10588c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.123379] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "8fa5fef6-8768-4e24-aab3-db56a10588c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.247787] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.364051] env[63297]: DEBUG nova.compute.utils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1254.369515] env[63297]: DEBUG nova.compute.manager [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1254.369810] env[63297]: DEBUG nova.network.neutron [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1254.395804] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697041, 'name': CloneVM_Task, 'duration_secs': 1.690859} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.398859] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Created linked-clone VM from snapshot [ 1254.400673] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4276b9-5033-4829-b354-59e69ab8fb9b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.415117] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Uploading image 4d130420-a69d-4a58-8280-7f001cd178d4 {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1254.429730] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5244d236-521b-8dff-3aa7-e91b6cd0a9df, 'name': SearchDatastore_Task, 'duration_secs': 0.016928} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.430064] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bac94090-682b-4d0b-9d81-fdfaeb7a2830 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.437291] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Waiting for the task: (returnval){ [ 1254.437291] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c11f58-8589-af2c-1188-9a1660eb4364" [ 1254.437291] env[63297]: _type = "Task" [ 1254.437291] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.452086] env[63297]: DEBUG oslo_vmware.rw_handles [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1254.452086] env[63297]: value = "vm-353798" [ 1254.452086] env[63297]: _type = "VirtualMachine" [ 1254.452086] env[63297]: }. 
{{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1254.452540] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b2952390-be70-4147-a0de-7d577e52d9a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.456561] env[63297]: DEBUG nova.policy [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42c1eef518d7422c83cb75c14edc2e5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4701357dafc84216ae883b6e88b34d5d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1254.462919] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c11f58-8589-af2c-1188-9a1660eb4364, 'name': SearchDatastore_Task, 'duration_secs': 0.012249} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.467090] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.467340] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 86a0579f-211c-42bc-925a-e30aaca4e0f5/86a0579f-211c-42bc-925a-e30aaca4e0f5.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1254.467923] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40d63421-ea6e-4f51-916c-54ab83ebc736 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.473529] env[63297]: DEBUG oslo_vmware.rw_handles [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lease: (returnval){ [ 1254.473529] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524eca9e-2add-cc16-6279-ced4a5c0ae67" [ 1254.473529] env[63297]: _type = "HttpNfcLease" [ 1254.473529] env[63297]: } obtained for exporting VM: (result){ [ 1254.473529] env[63297]: value = "vm-353798" [ 1254.473529] env[63297]: _type = "VirtualMachine" [ 1254.473529] env[63297]: }. 
{{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1254.473761] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the lease: (returnval){ [ 1254.473761] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524eca9e-2add-cc16-6279-ced4a5c0ae67" [ 1254.473761] env[63297]: _type = "HttpNfcLease" [ 1254.473761] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1254.479169] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Waiting for the task: (returnval){ [ 1254.479169] env[63297]: value = "task-1697044" [ 1254.479169] env[63297]: _type = "Task" [ 1254.479169] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.491174] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1254.491174] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524eca9e-2add-cc16-6279-ced4a5c0ae67" [ 1254.491174] env[63297]: _type = "HttpNfcLease" [ 1254.491174] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1254.494719] env[63297]: DEBUG oslo_vmware.rw_handles [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1254.494719] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524eca9e-2add-cc16-6279-ced4a5c0ae67" [ 1254.494719] env[63297]: _type = "HttpNfcLease" [ 1254.494719] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1254.498147] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697044, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.502265] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c15c01e-e24f-474b-980f-c930d62692bb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.513566] env[63297]: DEBUG oslo_vmware.rw_handles [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520b8dbc-6361-8a1f-2529-edb277579b41/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1254.513566] env[63297]: DEBUG oslo_vmware.rw_handles [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520b8dbc-6361-8a1f-2529-edb277579b41/disk-0.vmdk for reading. 
{{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1254.574096] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.574321] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.574456] env[63297]: DEBUG nova.network.neutron [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1254.625128] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-eb54e020-01cd-46c8-affd-b644fdaab0c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.873070] env[63297]: DEBUG nova.compute.manager [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1254.896957] env[63297]: DEBUG nova.network.neutron [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Successfully created port: 1e0746e7-53ea-4197-8089-36ec9df7fadb {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1254.995335] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697044, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.997579] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6e537d-b115-4813-b945-a2fc55042dcb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.007258] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07aa5494-803d-4ab2-83be-1bbba1dd4fdc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.052388] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27fe9b9-3348-41b0-8993-ae0f3aca51e2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.062178] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbd17db-e62d-44a6-9e19-16182655e354 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.083626] env[63297]: DEBUG nova.compute.provider_tree [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.115368] env[63297]: DEBUG oslo_vmware.rw_handles [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cdc592-29bd-2350-1373-69770205c337/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1255.116926] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40dbd9b7-e7ea-4d79-8b3a-2dd9353f4147 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.124817] env[63297]: DEBUG oslo_vmware.rw_handles [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cdc592-29bd-2350-1373-69770205c337/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1255.125825] env[63297]: ERROR oslo_vmware.rw_handles [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cdc592-29bd-2350-1373-69770205c337/disk-0.vmdk due to incomplete transfer. 
[ 1255.128238] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-08e4bcd2-2a8d-4b25-ac98-6f48ab600c0a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.138140] env[63297]: DEBUG oslo_vmware.rw_handles [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cdc592-29bd-2350-1373-69770205c337/disk-0.vmdk. {{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1255.138281] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Uploaded image 0da86208-107b-4969-bdcf-df764837b40e to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1255.140337] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1255.140337] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-70c5381f-22a9-41f3-a35e-5b640d12ce14 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.148446] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1255.148446] env[63297]: value = "task-1697045" [ 1255.148446] env[63297]: _type = "Task" [ 1255.148446] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.162367] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697045, 'name': Destroy_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.367113] env[63297]: DEBUG nova.network.neutron [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance_info_cache with network_info: [{"id": "3db68b1a-d911-4324-b993-dc755277e56b", "address": "fa:16:3e:c3:7e:ea", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3db68b1a-d9", "ovs_interfaceid": "3db68b1a-d911-4324-b993-dc755277e56b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.497535] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697044, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556975} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.497937] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 86a0579f-211c-42bc-925a-e30aaca4e0f5/86a0579f-211c-42bc-925a-e30aaca4e0f5.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1255.498372] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1255.498665] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-148979b6-06af-4171-9bce-65e7e2d0119d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.507320] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Waiting for the task: (returnval){ [ 1255.507320] env[63297]: value = "task-1697046" [ 1255.507320] env[63297]: _type = "Task" [ 1255.507320] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.532554] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697046, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.586645] env[63297]: DEBUG nova.scheduler.client.report [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1255.659544] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697045, 'name': Destroy_Task} progress is 33%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.870157] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1255.886436] env[63297]: DEBUG nova.compute.manager [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1255.908717] env[63297]: DEBUG nova.virt.hardware [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1255.909212] env[63297]: DEBUG nova.virt.hardware [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1255.909212] env[63297]: DEBUG nova.virt.hardware [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1255.909389] env[63297]: DEBUG nova.virt.hardware [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1255.910368] env[63297]: DEBUG nova.virt.hardware [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1255.910368] env[63297]: DEBUG nova.virt.hardware [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1255.910368] env[63297]: DEBUG nova.virt.hardware [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1255.910368] env[63297]: DEBUG nova.virt.hardware [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1255.910743] env[63297]: DEBUG nova.virt.hardware [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1255.910743] env[63297]: DEBUG nova.virt.hardware [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1255.910894] env[63297]: DEBUG nova.virt.hardware [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1255.911833] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8018dfe6-a499-438c-8caf-b65c0129ec2f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.922756] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe45333-fab5-4291-a580-71c30d89e9d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.018283] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697046, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.16859} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.018757] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1256.019651] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8067fcc-e618-4176-8d27-ab5fd97071f6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.044084] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 86a0579f-211c-42bc-925a-e30aaca4e0f5/86a0579f-211c-42bc-925a-e30aaca4e0f5.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1256.048118] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc8334a0-d32d-4895-8a4e-13fe44fc57c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.074341] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Waiting for the task: (returnval){ [ 1256.074341] env[63297]: value = "task-1697047" [ 1256.074341] env[63297]: _type = "Task" [ 1256.074341] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.083827] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697047, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.092403] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.232s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.094912] env[63297]: DEBUG oslo_concurrency.lockutils [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.577s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.095367] env[63297]: DEBUG nova.objects.instance [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Lazy-loading 'resources' on Instance uuid 6d290634-67e7-4fb4-9a88-3da6eca34d4b {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1256.117968] env[63297]: INFO nova.scheduler.client.report [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Deleted allocations for instance ef3346b1-ce09-4616-bdf4-200ea31efd01 [ 1256.159739] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697045, 'name': Destroy_Task, 'duration_secs': 0.57724} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.160052] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Destroyed the VM [ 1256.160649] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1256.160742] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e6c0c744-97d6-4af8-a378-f71db5a38629 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.172129] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1256.172129] env[63297]: value = "task-1697048" [ 1256.172129] env[63297]: _type = "Task" [ 1256.172129] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.183090] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697048, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.400774] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a10b0d-95d3-4606-a96a-688c6079e314 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.424496] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec172c9a-2581-41e1-a8ba-db3d7114482f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.432563] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance '9b1306f9-4b0a-4116-8e79-271478f33490' progress to 83 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1256.503920] env[63297]: DEBUG nova.compute.manager [req-cd1efc9e-8424-4a8c-94b5-715ffcb6e968 req-3bb491fe-06c4-42fd-a0eb-da799c96a809 service nova] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Received event network-vif-plugged-1e0746e7-53ea-4197-8089-36ec9df7fadb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1256.503920] env[63297]: DEBUG oslo_concurrency.lockutils [req-cd1efc9e-8424-4a8c-94b5-715ffcb6e968 req-3bb491fe-06c4-42fd-a0eb-da799c96a809 service nova] Acquiring lock "8c10c573-de56-4c72-959a-65bf53b805a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.504012] env[63297]: DEBUG oslo_concurrency.lockutils [req-cd1efc9e-8424-4a8c-94b5-715ffcb6e968 req-3bb491fe-06c4-42fd-a0eb-da799c96a809 service nova] Lock "8c10c573-de56-4c72-959a-65bf53b805a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.504262] env[63297]: DEBUG oslo_concurrency.lockutils [req-cd1efc9e-8424-4a8c-94b5-715ffcb6e968 req-3bb491fe-06c4-42fd-a0eb-da799c96a809 service nova] Lock "8c10c573-de56-4c72-959a-65bf53b805a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.504512] env[63297]: DEBUG nova.compute.manager [req-cd1efc9e-8424-4a8c-94b5-715ffcb6e968 req-3bb491fe-06c4-42fd-a0eb-da799c96a809 service nova] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] No waiting events found dispatching network-vif-plugged-1e0746e7-53ea-4197-8089-36ec9df7fadb {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1256.504754] env[63297]: WARNING nova.compute.manager [req-cd1efc9e-8424-4a8c-94b5-715ffcb6e968 req-3bb491fe-06c4-42fd-a0eb-da799c96a809 service nova] [instance: 
8c10c573-de56-4c72-959a-65bf53b805a5] Received unexpected event network-vif-plugged-1e0746e7-53ea-4197-8089-36ec9df7fadb for instance with vm_state building and task_state spawning. [ 1256.586278] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697047, 'name': ReconfigVM_Task, 'duration_secs': 0.354143} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.586653] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 86a0579f-211c-42bc-925a-e30aaca4e0f5/86a0579f-211c-42bc-925a-e30aaca4e0f5.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1256.587393] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48b29365-a27c-4826-863e-ed3806bcf0c5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.595978] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Waiting for the task: (returnval){ [ 1256.595978] env[63297]: value = "task-1697050" [ 1256.595978] env[63297]: _type = "Task" [ 1256.595978] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.609849] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697050, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.620788] env[63297]: DEBUG nova.network.neutron [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Successfully updated port: 1e0746e7-53ea-4197-8089-36ec9df7fadb {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1256.628619] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b8a5aa70-0ee3-4742-8691-ffd74e184bea tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "ef3346b1-ce09-4616-bdf4-200ea31efd01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.791s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.683844] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697048, 'name': RemoveSnapshot_Task} progress is 36%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.939573] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1256.939573] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e1cda1b-d9b8-471b-8282-86d04a99c2ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.948972] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1256.948972] env[63297]: value = "task-1697051" [ 1256.948972] env[63297]: _type = "Task" [ 1256.948972] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.958804] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697051, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.110837] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697050, 'name': Rename_Task, 'duration_secs': 0.160362} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.111141] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1257.111420] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53bb84af-b738-46eb-be48-4f0246445cf4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.115643] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7117c81-97b4-4724-b742-11c31b0c0608 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.119789] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Waiting for the task: (returnval){ [ 1257.119789] env[63297]: value = "task-1697052" [ 1257.119789] env[63297]: _type = "Task" [ 1257.119789] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.127313] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "refresh_cache-8c10c573-de56-4c72-959a-65bf53b805a5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.127531] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquired lock "refresh_cache-8c10c573-de56-4c72-959a-65bf53b805a5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.127741] env[63297]: DEBUG nova.network.neutron [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1257.131940] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfd689b-839a-499d-b41f-2360e5c8a16c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.143264] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697052, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.178546] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f0d204-f0bd-4784-8a57-5f2a9cc2b8c8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.189925] env[63297]: DEBUG oslo_vmware.api [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697048, 'name': RemoveSnapshot_Task, 'duration_secs': 0.646064} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.190333] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1257.190758] env[63297]: INFO nova.compute.manager [None req-edcb98f8-2c6c-432d-b197-3593571dc8ce tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Took 18.99 seconds to snapshot the instance on the hypervisor. 
[ 1257.194901] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad0be4e-5fb4-4551-92a9-e571c3ff932e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.210116] env[63297]: DEBUG nova.compute.provider_tree [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1257.461402] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697051, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.631874] env[63297]: DEBUG oslo_vmware.api [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697052, 'name': PowerOnVM_Task, 'duration_secs': 0.511243} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.632201] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1257.632503] env[63297]: INFO nova.compute.manager [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Took 5.46 seconds to spawn the instance on the hypervisor. [ 1257.632759] env[63297]: DEBUG nova.compute.manager [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1257.634018] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9f2775-7656-454c-86bd-4644ffa1bfa9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.683913] env[63297]: DEBUG nova.network.neutron [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1257.713863] env[63297]: DEBUG nova.scheduler.client.report [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1257.907650] env[63297]: DEBUG nova.network.neutron [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Updating instance_info_cache with network_info: [{"id": "1e0746e7-53ea-4197-8089-36ec9df7fadb", "address": "fa:16:3e:0e:d2:99", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e0746e7-53", "ovs_interfaceid": "1e0746e7-53ea-4197-8089-36ec9df7fadb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.963825] env[63297]: DEBUG oslo_vmware.api [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697051, 'name': PowerOnVM_Task, 'duration_secs': 0.532591} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.963825] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1257.964286] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3721e31-0609-4f5e-b88e-382402a80ef3 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance '9b1306f9-4b0a-4116-8e79-271478f33490' progress to 100 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1258.158041] env[63297]: INFO nova.compute.manager [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Took 38.59 seconds to build instance. [ 1258.219933] env[63297]: DEBUG oslo_concurrency.lockutils [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.125s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.228018] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.003s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.228018] env[63297]: INFO nova.compute.claims [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1258.251182] env[63297]: INFO nova.scheduler.client.report [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Deleted allocations for instance 6d290634-67e7-4fb4-9a88-3da6eca34d4b [ 1258.410293] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Releasing lock "refresh_cache-8c10c573-de56-4c72-959a-65bf53b805a5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.410769] env[63297]: DEBUG nova.compute.manager [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Instance network_info: |[{"id": "1e0746e7-53ea-4197-8089-36ec9df7fadb", "address": "fa:16:3e:0e:d2:99", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], 
"gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e0746e7-53", "ovs_interfaceid": "1e0746e7-53ea-4197-8089-36ec9df7fadb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1258.411310] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:d2:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e0746e7-53ea-4197-8089-36ec9df7fadb', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1258.421114] env[63297]: DEBUG oslo.service.loopingcall [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1258.421809] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1258.421809] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6a53475-9088-4053-a4f0-9d287c8152bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.447103] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1258.447103] env[63297]: value = "task-1697054" [ 1258.447103] env[63297]: _type = "Task" [ 1258.447103] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.456473] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697054, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.659336] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e5a653e-7977-44fc-99ba-2b303ce6c9a6 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Lock "86a0579f-211c-42bc-925a-e30aaca4e0f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.942s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.764432] env[63297]: DEBUG oslo_concurrency.lockutils [None req-998d107f-e3e9-430c-a875-3e3cd64fcade tempest-ServerDiagnosticsNegativeTest-1386005932 tempest-ServerDiagnosticsNegativeTest-1386005932-project-member] Lock "6d290634-67e7-4fb4-9a88-3da6eca34d4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.214s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.793862] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "d15a7e98-755b-4c5c-ba34-dc5fc3f8846d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.794326] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "d15a7e98-755b-4c5c-ba34-dc5fc3f8846d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.867774] env[63297]: DEBUG nova.compute.manager [req-56b6abe7-9297-4783-879c-a39fcb8cf1e0 req-ef73bcf6-c8ff-452a-a495-c58884ce6164 service nova] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Received event network-changed-1e0746e7-53ea-4197-8089-36ec9df7fadb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1258.867995] env[63297]: DEBUG nova.compute.manager [req-56b6abe7-9297-4783-879c-a39fcb8cf1e0 req-ef73bcf6-c8ff-452a-a495-c58884ce6164 service nova] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Refreshing instance network info cache due to event network-changed-1e0746e7-53ea-4197-8089-36ec9df7fadb. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1258.868249] env[63297]: DEBUG oslo_concurrency.lockutils [req-56b6abe7-9297-4783-879c-a39fcb8cf1e0 req-ef73bcf6-c8ff-452a-a495-c58884ce6164 service nova] Acquiring lock "refresh_cache-8c10c573-de56-4c72-959a-65bf53b805a5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1258.868399] env[63297]: DEBUG oslo_concurrency.lockutils [req-56b6abe7-9297-4783-879c-a39fcb8cf1e0 req-ef73bcf6-c8ff-452a-a495-c58884ce6164 service nova] Acquired lock "refresh_cache-8c10c573-de56-4c72-959a-65bf53b805a5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.868560] env[63297]: DEBUG nova.network.neutron [req-56b6abe7-9297-4783-879c-a39fcb8cf1e0 req-ef73bcf6-c8ff-452a-a495-c58884ce6164 service nova] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Refreshing network info cache for port 1e0746e7-53ea-4197-8089-36ec9df7fadb {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1258.961188] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697054, 'name': CreateVM_Task, 'duration_secs': 0.390207} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.961566] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1258.962288] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1258.962491] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.962826] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1258.963100] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e2040ae-da2f-4f4c-a5ed-f6914ecd64c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.968385] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1258.968385] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52796542-952b-d5cd-af8d-caaf9c1f65d0" [ 1258.968385] env[63297]: _type = "Task" [ 1258.968385] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.981913] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52796542-952b-d5cd-af8d-caaf9c1f65d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.161957] env[63297]: DEBUG nova.compute.manager [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1259.166597] env[63297]: DEBUG nova.compute.manager [None req-f64dd68e-232d-4642-957c-05a60ebf8030 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1259.167294] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd824cc-b5fe-4655-8ade-878ce0b38718 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.428664] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Acquiring lock "86a0579f-211c-42bc-925a-e30aaca4e0f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.428664] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Lock "86a0579f-211c-42bc-925a-e30aaca4e0f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.428664] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Acquiring lock "86a0579f-211c-42bc-925a-e30aaca4e0f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.428664] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Lock "86a0579f-211c-42bc-925a-e30aaca4e0f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.428664] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 
tempest-ServersAaction247Test-1431704946-project-member] Lock "86a0579f-211c-42bc-925a-e30aaca4e0f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1259.433272] env[63297]: INFO nova.compute.manager [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Terminating instance [ 1259.435165] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Acquiring lock "refresh_cache-86a0579f-211c-42bc-925a-e30aaca4e0f5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1259.435474] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Acquired lock "refresh_cache-86a0579f-211c-42bc-925a-e30aaca4e0f5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.435685] env[63297]: DEBUG nova.network.neutron [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1259.489241] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52796542-952b-d5cd-af8d-caaf9c1f65d0, 'name': SearchDatastore_Task, 'duration_secs': 0.023289} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.491309] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1259.491572] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1259.491804] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1259.491949] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.492160] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1259.493779] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f5b1b81-2931-42d1-8eac-753f6fa5137c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.506353] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1259.506607] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1259.511092] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6f5bd31-7a70-46c4-ac04-413fc3244ae8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.523022] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1259.523022] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524d4bbe-3bac-a8e8-b916-23021216a10e" [ 1259.523022] env[63297]: _type = "Task" [ 1259.523022] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.533856] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524d4bbe-3bac-a8e8-b916-23021216a10e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.680159] env[63297]: INFO nova.compute.manager [None req-f64dd68e-232d-4642-957c-05a60ebf8030 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] instance snapshotting [ 1259.680905] env[63297]: DEBUG nova.objects.instance [None req-f64dd68e-232d-4642-957c-05a60ebf8030 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Lazy-loading 'flavor' on Instance uuid 86a0579f-211c-42bc-925a-e30aaca4e0f5 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.688418] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.760316] env[63297]: DEBUG nova.network.neutron [req-56b6abe7-9297-4783-879c-a39fcb8cf1e0 req-ef73bcf6-c8ff-452a-a495-c58884ce6164 service nova] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Updated VIF entry in instance network info cache for port 1e0746e7-53ea-4197-8089-36ec9df7fadb. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1259.760648] env[63297]: DEBUG nova.network.neutron [req-56b6abe7-9297-4783-879c-a39fcb8cf1e0 req-ef73bcf6-c8ff-452a-a495-c58884ce6164 service nova] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Updating instance_info_cache with network_info: [{"id": "1e0746e7-53ea-4197-8089-36ec9df7fadb", "address": "fa:16:3e:0e:d2:99", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e0746e7-53", "ovs_interfaceid": "1e0746e7-53ea-4197-8089-36ec9df7fadb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.800026] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac891ae2-18e5-43ca-a716-07fe909b4101 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.809720] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e54a173-090a-4f92-ad7b-48d8c2e0d983 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.842683] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd796af-2121-4578-b190-a283a76f5a56 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.854523] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2c9989-0577-4be1-a51d-4209f5c0ae73 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.870476] env[63297]: DEBUG nova.compute.provider_tree [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1259.956357] env[63297]: DEBUG nova.network.neutron [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1260.026780] env[63297]: DEBUG nova.network.neutron [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1260.032466] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524d4bbe-3bac-a8e8-b916-23021216a10e, 'name': SearchDatastore_Task, 'duration_secs': 0.014416} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.033559] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-573cb179-a98b-43a1-8f45-293c01b85159 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.040205] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1260.040205] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522f7996-5d79-e06f-e1bb-a30c9cfe8fe2" [ 1260.040205] env[63297]: _type = "Task" [ 1260.040205] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.050105] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522f7996-5d79-e06f-e1bb-a30c9cfe8fe2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.083024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "9b1306f9-4b0a-4116-8e79-271478f33490" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.083024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "9b1306f9-4b0a-4116-8e79-271478f33490" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.083024] env[63297]: DEBUG nova.compute.manager [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Going to confirm migration 1 {{(pid=63297) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1260.189340] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faeff7d5-78d3-401d-854b-fa33b88e1f8f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.212174] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c78ef71-a9e0-4107-bbb8-6e5b41aef4c8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.264783] env[63297]: DEBUG oslo_concurrency.lockutils [req-56b6abe7-9297-4783-879c-a39fcb8cf1e0 req-ef73bcf6-c8ff-452a-a495-c58884ce6164 service nova] Releasing lock "refresh_cache-8c10c573-de56-4c72-959a-65bf53b805a5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1260.362036] env[63297]: DEBUG nova.compute.manager [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1260.362484] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90daf3ca-eeeb-4bdb-9aba-0f00cd8cbaf7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.373675] env[63297]: DEBUG nova.scheduler.client.report [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1260.535280] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Releasing lock "refresh_cache-86a0579f-211c-42bc-925a-e30aaca4e0f5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1260.536323] env[63297]: DEBUG nova.compute.manager [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1260.536323] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1260.536975] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129b3146-b275-4c4f-b7e6-280fd8e6e97a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.546589] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1260.547304] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-abdcfdba-bbae-400a-b992-25fc48ddf1c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.553714] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522f7996-5d79-e06f-e1bb-a30c9cfe8fe2, 'name': SearchDatastore_Task, 'duration_secs': 0.011292} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.555225] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1260.555501] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 8c10c573-de56-4c72-959a-65bf53b805a5/8c10c573-de56-4c72-959a-65bf53b805a5.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1260.555869] env[63297]: DEBUG oslo_vmware.api [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Waiting for the task: (returnval){ [ 1260.555869] env[63297]: value = "task-1697056" [ 1260.555869] env[63297]: _type = "Task" [ 1260.555869] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.556213] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62394114-7bf4-453a-90c1-5bac4836a6d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.571219] env[63297]: DEBUG oslo_vmware.api [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697056, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.573218] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1260.573218] env[63297]: value = "task-1697057" [ 1260.573218] env[63297]: _type = "Task" [ 1260.573218] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.584063] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697057, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.726284] env[63297]: DEBUG nova.compute.manager [None req-f64dd68e-232d-4642-957c-05a60ebf8030 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Instance disappeared during snapshot {{(pid=63297) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1260.882038] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1260.882428] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1260.888606] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1260.888911] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1260.889198] env[63297]: DEBUG nova.network.neutron [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1260.889491] env[63297]: DEBUG nova.objects.instance [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lazy-loading 'info_cache' on Instance uuid 9b1306f9-4b0a-4116-8e79-271478f33490 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1260.891454] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.596s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.893658] env[63297]: INFO nova.compute.claims [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1260.901021] env[63297]: 
INFO nova.compute.manager [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] instance snapshotting [ 1260.901930] env[63297]: DEBUG nova.compute.manager [None req-f64dd68e-232d-4642-957c-05a60ebf8030 tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Found 0 images (rotation: 2) {{(pid=63297) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1260.907150] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11667de-ed5a-44bb-928a-cc1351e1fbf2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.941498] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34951f61-0f93-4fc8-81ae-244846ac9dc4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.070874] env[63297]: DEBUG oslo_vmware.api [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697056, 'name': PowerOffVM_Task, 'duration_secs': 0.155863} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.071222] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1261.071466] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1261.071727] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9622ca7b-cb1d-412c-9fae-38b1118fadbb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.084746] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697057, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511033} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.085010] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 8c10c573-de56-4c72-959a-65bf53b805a5/8c10c573-de56-4c72-959a-65bf53b805a5.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1261.085718] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1261.085718] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c108ab48-2511-43a3-9c9a-761fb15101fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.096764] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1261.096764] env[63297]: value = "task-1697059" [ 1261.096764] env[63297]: _type = "Task" [ 1261.096764] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.106408] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697059, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.108985] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1261.109367] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1261.109558] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Deleting the datastore file [datastore1] 86a0579f-211c-42bc-925a-e30aaca4e0f5 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1261.110279] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1b57dc2-a006-4660-be77-c6d0b6241b86 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.119361] env[63297]: DEBUG oslo_vmware.api [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Waiting for the task: (returnval){ [ 1261.119361] env[63297]: value = "task-1697060" [ 1261.119361] env[63297]: _type = "Task" [ 1261.119361] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.128995] env[63297]: DEBUG oslo_vmware.api [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697060, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.410221] env[63297]: DEBUG nova.compute.utils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1261.411372] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1261.411620] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1261.460903] env[63297]: DEBUG nova.policy [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38504ea2d496450ca516b1e42b84566d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a362e1c3f514beeaaf5db9823da5cc3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1261.464865] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1261.465432] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4308825b-e86a-45f9-a474-35408815c8f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.476698] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1261.476698] env[63297]: value = "task-1697061" [ 1261.476698] env[63297]: _type = "Task" [ 1261.476698] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.489096] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697061, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.608421] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697059, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090997} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.609651] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1261.609738] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a88867-a538-490d-ad82-439f274d5284 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.639158] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 8c10c573-de56-4c72-959a-65bf53b805a5/8c10c573-de56-4c72-959a-65bf53b805a5.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1261.645124] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab252322-3c01-40c3-b178-7bdc29c038ef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.667686] env[63297]: DEBUG oslo_vmware.api [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Task: {'id': task-1697060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097194} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.669032] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1261.670462] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1261.670462] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1261.670462] env[63297]: INFO nova.compute.manager [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Took 1.13 seconds to destroy the instance on the hypervisor. 
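[editor's note] The recurring "Waiting for the task: (returnval){ ... } to complete", "progress is 0%", and "completed successfully" records above are produced by oslo.vmware's task polling (the wait_for_task / _poll_task paths in oslo_vmware/api.py cited in the log). The snippet below is a self-contained, illustrative sketch of that fixed-interval polling pattern only; FakeTask and poll_task are editorial stand-ins, not the oslo.vmware implementation or the real vSphere task objects.

```python
import time

# Illustrative stand-in for a vSphere task handle. The real handles come from
# the vCenter SOAP API via oslo.vmware/suds, not from this class.
class FakeTask:
    def __init__(self, steps):
        self._steps = steps      # number of polls before the task "finishes"
        self.progress = 0
        self.state = "running"

    def refresh(self):
        # Each poll advances the simulated task until it reports success.
        self._steps -= 1
        self.progress = min(100, self.progress + 50)
        if self._steps <= 0:
            self.state = "success"

def poll_task(task, interval=0.5):
    """Poll a task at a fixed interval until it finishes, mirroring the
    'progress is N%.' / 'completed successfully' records in the log."""
    start = time.monotonic()
    while True:
        task.refresh()
        print(f"Task progress is {task.progress}%.")
        if task.state == "success":
            print(f"Task completed successfully in {time.monotonic() - start:.3f}s.")
            return
        if task.state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)

if __name__ == "__main__":
    poll_task(FakeTask(steps=2), interval=0.1)
```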
[ 1261.670462] env[63297]: DEBUG oslo.service.loopingcall [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1261.670715] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1261.670715] env[63297]: value = "task-1697062" [ 1261.670715] env[63297]: _type = "Task" [ 1261.670715] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.671090] env[63297]: DEBUG nova.compute.manager [-] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1261.673018] env[63297]: DEBUG nova.network.neutron [-] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1261.684876] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697062, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.691065] env[63297]: DEBUG nova.network.neutron [-] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1261.827896] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Successfully created port: a0f734f5-ff87-41a5-ac99-57124d62dcda {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1261.916506] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1261.994192] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697061, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.186409] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697062, 'name': ReconfigVM_Task, 'duration_secs': 0.300359} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.186696] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 8c10c573-de56-4c72-959a-65bf53b805a5/8c10c573-de56-4c72-959a-65bf53b805a5.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1262.187337] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e9b2669-bfd4-4d3d-a295-55cc92e71510 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.193753] env[63297]: DEBUG nova.network.neutron [-] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.200087] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1262.200087] env[63297]: value = "task-1697063" [ 1262.200087] env[63297]: _type = "Task" [ 1262.200087] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.212766] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697063, 'name': Rename_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.256504] env[63297]: DEBUG nova.network.neutron [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance_info_cache with network_info: [{"id": "3db68b1a-d911-4324-b993-dc755277e56b", "address": "fa:16:3e:c3:7e:ea", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3db68b1a-d9", "ovs_interfaceid": "3db68b1a-d911-4324-b993-dc755277e56b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.458780] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454d073c-e715-4e99-8808-643cba253565 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.467312] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff8f3cf-303e-4731-b96a-111e44cbffce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.505737] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506cb939-475e-4b8c-a531-ea655d2036b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.515127] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697061, 'name': CreateSnapshot_Task, 'duration_secs': 0.540207} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.517452] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1262.518320] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906ddfac-3bc8-4a5c-8a6d-aa3ccf78030d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.521819] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89af560-6a5c-48a7-91d1-0879306c76c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.543349] env[63297]: DEBUG nova.compute.provider_tree [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1262.701569] env[63297]: INFO nova.compute.manager [-] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Took 1.03 seconds to deallocate network for instance. [ 1262.717610] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697063, 'name': Rename_Task, 'duration_secs': 0.174701} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.717958] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1262.719096] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6972ad59-37ce-4cf1-8970-0cc754a2c587 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.729328] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1262.729328] env[63297]: value = "task-1697065" [ 1262.729328] env[63297]: _type = "Task" [ 1262.729328] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.739913] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697065, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.759509] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "refresh_cache-9b1306f9-4b0a-4116-8e79-271478f33490" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1262.759949] env[63297]: DEBUG nova.objects.instance [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lazy-loading 'migration_context' on Instance uuid 9b1306f9-4b0a-4116-8e79-271478f33490 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1262.902504] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1262.902746] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.926399] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1262.947276] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1262.947534] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1262.947688] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1262.947866] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1262.948020] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1262.948177] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1262.948418] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1262.948587] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1262.948753] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1262.948912] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1262.949103] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1262.949972] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2462524a-008c-4f39-94ab-88528da50010 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.959394] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a238bfde-92dd-4d63-99ee-6cf7abd08275 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.053768] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1263.054756] env[63297]: DEBUG nova.scheduler.client.report [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1263.058209] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9c783283-454c-4ba5-8cd5-887105f409e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.068653] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1263.068653] env[63297]: value = "task-1697066" [ 1263.068653] env[63297]: _type = "Task" [ 1263.068653] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.079906] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697066, 'name': CloneVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.213983] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1263.242303] env[63297]: DEBUG oslo_vmware.api [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697065, 'name': PowerOnVM_Task, 'duration_secs': 0.500767} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.242635] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1263.242835] env[63297]: INFO nova.compute.manager [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Took 7.36 seconds to spawn the instance on the hypervisor. 
[ 1263.243029] env[63297]: DEBUG nova.compute.manager [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1263.243838] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803703b5-c2eb-4f35-a97a-b0c39c0129ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.264908] env[63297]: DEBUG nova.objects.base [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Object Instance<9b1306f9-4b0a-4116-8e79-271478f33490> lazy-loaded attributes: info_cache,migration_context {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1263.265521] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a655ed-1d8e-482f-ad9e-57ae859f9978 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.292160] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-415c7323-8e29-4136-9b42-6b69beaaebd3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.298981] env[63297]: DEBUG oslo_vmware.api [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1263.298981] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]523f69f9-9484-76a3-ef14-1b46d10b82c4" [ 1263.298981] env[63297]: _type = "Task" [ 1263.298981] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.309221] env[63297]: DEBUG oslo_vmware.api [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523f69f9-9484-76a3-ef14-1b46d10b82c4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.387316] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "5e158880-81a6-4d35-b1df-6fd59ba4a8ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1263.387704] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "5e158880-81a6-4d35-b1df-6fd59ba4a8ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1263.444559] env[63297]: DEBUG nova.compute.manager [req-f2702a39-dd1b-4493-8f38-5b2d12efeed6 req-6f22eaa4-fa6b-44e9-aab7-f4712ab2af29 service nova] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Received event network-vif-plugged-a0f734f5-ff87-41a5-ac99-57124d62dcda {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1263.446235] env[63297]: DEBUG oslo_concurrency.lockutils [req-f2702a39-dd1b-4493-8f38-5b2d12efeed6 req-6f22eaa4-fa6b-44e9-aab7-f4712ab2af29 service nova] Acquiring lock "42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1263.446235] env[63297]: DEBUG oslo_concurrency.lockutils [req-f2702a39-dd1b-4493-8f38-5b2d12efeed6 req-6f22eaa4-fa6b-44e9-aab7-f4712ab2af29 service nova] Lock "42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1263.446235] env[63297]: DEBUG oslo_concurrency.lockutils [req-f2702a39-dd1b-4493-8f38-5b2d12efeed6 req-6f22eaa4-fa6b-44e9-aab7-f4712ab2af29 service nova] Lock "42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.446235] env[63297]: DEBUG nova.compute.manager [req-f2702a39-dd1b-4493-8f38-5b2d12efeed6 req-6f22eaa4-fa6b-44e9-aab7-f4712ab2af29 service nova] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] No waiting events found dispatching network-vif-plugged-a0f734f5-ff87-41a5-ac99-57124d62dcda {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1263.446235] env[63297]: WARNING nova.compute.manager [req-f2702a39-dd1b-4493-8f38-5b2d12efeed6 req-6f22eaa4-fa6b-44e9-aab7-f4712ab2af29 service nova] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Received unexpected event network-vif-plugged-a0f734f5-ff87-41a5-ac99-57124d62dcda for instance with vm_state building and task_state spawning. 
[ 1263.534531] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Successfully updated port: a0f734f5-ff87-41a5-ac99-57124d62dcda {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1263.565018] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.672s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.565018] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1263.566519] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 30.391s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1263.566861] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.567215] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1263.569016] env[63297]: DEBUG oslo_concurrency.lockutils [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.493s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1263.569016] env[63297]: DEBUG nova.objects.instance [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63297) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1263.572459] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ce7b58-ed9f-49bd-8197-16c16476c674 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.591939] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 
tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697066, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.595304] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69546d2d-fc29-47f1-b132-70b46fd73980 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.611338] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f2e285-efdd-400c-b772-68c613a95c86 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.620704] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235c28db-780c-414f-9969-ab8674a12bf9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.655736] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180824MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1263.655912] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1263.746459] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "99cc8af3-5c18-4839-94db-996861e0c276" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1263.746768] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "99cc8af3-5c18-4839-94db-996861e0c276" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1263.766715] env[63297]: INFO nova.compute.manager [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Took 40.86 seconds to build instance. [ 1263.811039] env[63297]: DEBUG oslo_vmware.api [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523f69f9-9484-76a3-ef14-1b46d10b82c4, 'name': SearchDatastore_Task, 'duration_secs': 0.013175} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.811205] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1264.037816] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "refresh_cache-42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1264.038644] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquired lock "refresh_cache-42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.038644] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1264.073398] env[63297]: DEBUG nova.compute.utils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1264.074977] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1264.075650] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1264.094651] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697066, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.125564] env[63297]: DEBUG nova.policy [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38504ea2d496450ca516b1e42b84566d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a362e1c3f514beeaaf5db9823da5cc3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1264.268147] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d4a1068b-2573-4178-9271-4d531c9b6cde tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "8c10c573-de56-4c72-959a-65bf53b805a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.356s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.412135] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "8c10c573-de56-4c72-959a-65bf53b805a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1264.412135] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "8c10c573-de56-4c72-959a-65bf53b805a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.412135] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "8c10c573-de56-4c72-959a-65bf53b805a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1264.412135] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "8c10c573-de56-4c72-959a-65bf53b805a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.412135] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "8c10c573-de56-4c72-959a-65bf53b805a5-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.413978] env[63297]: INFO nova.compute.manager [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Terminating instance [ 1264.415928] env[63297]: DEBUG nova.compute.manager [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1264.415928] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1264.416687] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feeaa682-26f8-4fd0-8557-ce9a4ec7433b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.427613] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1264.427962] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00139f72-5c7c-4b7f-9730-b9629184156a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.437833] env[63297]: DEBUG oslo_vmware.api [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1264.437833] env[63297]: value = "task-1697067" [ 1264.437833] env[63297]: _type = "Task" [ 1264.437833] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.449845] env[63297]: DEBUG oslo_vmware.api [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697067, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.454623] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Successfully created port: d865dcd8-f11a-4c74-8534-760e6a578524 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1264.579248] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1264.586950] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1264.589979] env[63297]: DEBUG oslo_concurrency.lockutils [None req-920f2df6-61a9-4832-b617-fa017729c994 tempest-ServersAdmin275Test-2097869383 tempest-ServersAdmin275Test-2097869383-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.022s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.595258] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.937s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.597517] env[63297]: INFO nova.compute.claims [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1264.609873] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697066, 'name': CloneVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.771321] env[63297]: DEBUG nova.compute.manager [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1264.812083] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Updating instance_info_cache with network_info: [{"id": "a0f734f5-ff87-41a5-ac99-57124d62dcda", "address": "fa:16:3e:39:4f:c6", "network": {"id": "d45a878d-62b0-4131-acb7-0d69bf7ddec8", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-835758244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a362e1c3f514beeaaf5db9823da5cc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0f734f5-ff", "ovs_interfaceid": "a0f734f5-ff87-41a5-ac99-57124d62dcda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.846990] env[63297]: DEBUG oslo_vmware.rw_handles [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520b8dbc-6361-8a1f-2529-edb277579b41/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1264.848387] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970cb7f1-6d2c-4d21-a59b-6c8253c28bcd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.855139] env[63297]: DEBUG oslo_vmware.rw_handles [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520b8dbc-6361-8a1f-2529-edb277579b41/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1264.855349] env[63297]: ERROR oslo_vmware.rw_handles [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520b8dbc-6361-8a1f-2529-edb277579b41/disk-0.vmdk due to incomplete transfer. 
[ 1264.855552] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b54a8cc9-78f5-4c01-962d-ff3cdcac21ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.864034] env[63297]: DEBUG oslo_vmware.rw_handles [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520b8dbc-6361-8a1f-2529-edb277579b41/disk-0.vmdk. {{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1264.864248] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Uploaded image 4d130420-a69d-4a58-8280-7f001cd178d4 to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1264.866276] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1264.866535] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-07599b99-9ddc-41eb-9662-78fdc0efcf8a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.874314] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1264.874314] env[63297]: value = "task-1697068" [ 1264.874314] env[63297]: _type = "Task" [ 1264.874314] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.885745] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697068, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.948604] env[63297]: DEBUG oslo_vmware.api [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697067, 'name': PowerOffVM_Task, 'duration_secs': 0.242521} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.948873] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1264.949049] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1264.949380] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84c66713-d477-4abd-87d8-b065f02cb674 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.031651] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1265.032608] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1265.032608] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Deleting the datastore file [datastore1] 8c10c573-de56-4c72-959a-65bf53b805a5 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1265.032608] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e627ec6a-0b52-4149-8a46-67939469ff54 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.040263] env[63297]: DEBUG oslo_vmware.api [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for the task: (returnval){ [ 1265.040263] env[63297]: value = "task-1697070" [ 1265.040263] env[63297]: _type = "Task" [ 1265.040263] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.050950] env[63297]: DEBUG oslo_vmware.api [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697070, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.094458] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697066, 'name': CloneVM_Task, 'duration_secs': 1.630906} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.094966] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Created linked-clone VM from snapshot [ 1265.095801] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1cd49d-1981-4bf1-b79b-4421a2f05fe7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.106737] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Uploading image 053112fe-4ce6-4f19-9eda-3789f6f82242 {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1265.129491] env[63297]: DEBUG oslo_vmware.rw_handles [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1265.129491] env[63297]: value = "vm-353802" [ 1265.129491] env[63297]: _type = "VirtualMachine" [ 1265.129491] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1265.130027] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-404ac8cc-6e38-45d3-be54-b30ec8da57d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.138796] env[63297]: DEBUG oslo_vmware.rw_handles [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lease: (returnval){ [ 1265.138796] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e63f10-cc42-ca68-853a-fe6a892d34f2" [ 1265.138796] env[63297]: _type = "HttpNfcLease" [ 1265.138796] env[63297]: } obtained for exporting VM: (result){ [ 1265.138796] env[63297]: value = "vm-353802" [ 1265.138796] env[63297]: _type = "VirtualMachine" [ 1265.138796] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1265.139108] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the lease: (returnval){ [ 1265.139108] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e63f10-cc42-ca68-853a-fe6a892d34f2" [ 1265.139108] env[63297]: _type = "HttpNfcLease" [ 1265.139108] env[63297]: } to be ready. 
{{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1265.147591] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1265.147591] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e63f10-cc42-ca68-853a-fe6a892d34f2" [ 1265.147591] env[63297]: _type = "HttpNfcLease" [ 1265.147591] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1265.298110] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.316150] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Releasing lock "refresh_cache-42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1265.316487] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Instance network_info: |[{"id": "a0f734f5-ff87-41a5-ac99-57124d62dcda", "address": "fa:16:3e:39:4f:c6", "network": {"id": "d45a878d-62b0-4131-acb7-0d69bf7ddec8", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-835758244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a362e1c3f514beeaaf5db9823da5cc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0f734f5-ff", "ovs_interfaceid": "a0f734f5-ff87-41a5-ac99-57124d62dcda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1265.316901] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:4f:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f678cd81-6d15-43d5-aab7-d7eedc2ef2d5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a0f734f5-ff87-41a5-ac99-57124d62dcda', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1265.324636] env[63297]: 
DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Creating folder: Project (0a362e1c3f514beeaaf5db9823da5cc3). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1265.325044] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2570a3b9-07af-42e4-8238-c2436afa44e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.340383] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Created folder: Project (0a362e1c3f514beeaaf5db9823da5cc3) in parent group-v353718. [ 1265.340672] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Creating folder: Instances. Parent ref: group-v353803. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1265.340980] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a3c262a-8103-42c5-9c81-45ff244f3907 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.356589] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Created folder: Instances in parent group-v353803. [ 1265.356589] env[63297]: DEBUG oslo.service.loopingcall [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1265.356662] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1265.356891] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d541fcbb-ed56-41eb-8225-e71afc9d7337 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.380440] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1265.380440] env[63297]: value = "task-1697074" [ 1265.380440] env[63297]: _type = "Task" [ 1265.380440] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.398760] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697068, 'name': Destroy_Task, 'duration_secs': 0.52092} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.402241] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Destroyed the VM [ 1265.402241] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1265.402241] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697074, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.402241] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6ac2ddc3-f3cd-4e7e-94b6-9997e4a45906 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.410424] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1265.410424] env[63297]: value = "task-1697075" [ 1265.410424] env[63297]: _type = "Task" [ 1265.410424] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.419776] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697075, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.495937] env[63297]: DEBUG nova.compute.manager [req-16db51c5-ffe4-4f27-ba76-43646ed520d8 req-f57b8335-eaf3-4042-8426-3aa9f1e28ce5 service nova] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Received event network-changed-a0f734f5-ff87-41a5-ac99-57124d62dcda {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1265.496068] env[63297]: DEBUG nova.compute.manager [req-16db51c5-ffe4-4f27-ba76-43646ed520d8 req-f57b8335-eaf3-4042-8426-3aa9f1e28ce5 service nova] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Refreshing instance network info cache due to event network-changed-a0f734f5-ff87-41a5-ac99-57124d62dcda. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1265.496293] env[63297]: DEBUG oslo_concurrency.lockutils [req-16db51c5-ffe4-4f27-ba76-43646ed520d8 req-f57b8335-eaf3-4042-8426-3aa9f1e28ce5 service nova] Acquiring lock "refresh_cache-42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1265.496482] env[63297]: DEBUG oslo_concurrency.lockutils [req-16db51c5-ffe4-4f27-ba76-43646ed520d8 req-f57b8335-eaf3-4042-8426-3aa9f1e28ce5 service nova] Acquired lock "refresh_cache-42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.496662] env[63297]: DEBUG nova.network.neutron [req-16db51c5-ffe4-4f27-ba76-43646ed520d8 req-f57b8335-eaf3-4042-8426-3aa9f1e28ce5 service nova] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Refreshing network info cache for port a0f734f5-ff87-41a5-ac99-57124d62dcda {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1265.551899] env[63297]: DEBUG oslo_vmware.api [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Task: {'id': task-1697070, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237375} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.552444] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1265.552444] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1265.552614] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1265.552840] env[63297]: INFO nova.compute.manager [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1265.553130] env[63297]: DEBUG oslo.service.loopingcall [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1265.553350] env[63297]: DEBUG nova.compute.manager [-] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1265.553452] env[63297]: DEBUG nova.network.neutron [-] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1265.596232] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1265.619857] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1265.620116] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1265.620275] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1265.620475] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1265.620597] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1265.620744] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1265.620949] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1265.621167] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1265.621373] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1265.621546] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1265.621719] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1265.622685] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3c212f-c2eb-45f2-a339-f81328d977a1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.632443] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f2ad0a-952c-49df-b131-067e784f40d9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.664128] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1265.664128] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e63f10-cc42-ca68-853a-fe6a892d34f2" [ 1265.664128] env[63297]: _type = "HttpNfcLease" [ 1265.664128] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1265.664449] env[63297]: DEBUG oslo_vmware.rw_handles [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1265.664449] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e63f10-cc42-ca68-853a-fe6a892d34f2" [ 1265.664449] env[63297]: _type = "HttpNfcLease" [ 1265.664449] env[63297]: }. 
{{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1265.665234] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6bc3a6-a498-4b41-b433-f8a398483695 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.673418] env[63297]: DEBUG oslo_vmware.rw_handles [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ffe1d9-b4cd-8612-92ea-b81245f985d7/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1265.673655] env[63297]: DEBUG oslo_vmware.rw_handles [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ffe1d9-b4cd-8612-92ea-b81245f985d7/disk-0.vmdk for reading. {{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1265.775559] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e01f8728-6b2d-4a0c-86eb-b8441a705ade {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.895978] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697074, 'name': CreateVM_Task, 'duration_secs': 0.384061} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.897313] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1265.900567] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1265.900800] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.901147] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1265.901854] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Acquiring lock "4e6b1296-9e19-4047-9c38-dc94c686d0cb" 
by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.902207] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Lock "4e6b1296-9e19-4047-9c38-dc94c686d0cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.902334] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af75a51b-629d-4ba9-b26c-ecc4f46d5643 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.909866] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1265.909866] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a26599-c095-669e-4937-d97ca53c0a3b" [ 1265.909866] env[63297]: _type = "Task" [ 1265.909866] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.928755] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697075, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.929118] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a26599-c095-669e-4937-d97ca53c0a3b, 'name': SearchDatastore_Task, 'duration_secs': 0.009894} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.930126] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1265.930126] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1265.930126] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1265.930307] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.930448] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1265.930666] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f85390a-bf53-4841-9f1c-a5b0bc4e6418 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.945144] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1265.945456] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1265.947060] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ff205de-6c21-4d44-b770-5d6425860485 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.951754] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1265.951754] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]521075e9-090b-1baa-c509-fe6b5091fa06" [ 1265.951754] env[63297]: _type = "Task" [ 1265.951754] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.959369] env[63297]: DEBUG nova.compute.manager [req-e14ff6a9-93a8-4c0b-a223-182ca31c40f8 req-30af1798-b7c7-4709-bb87-83a173ed02fe service nova] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Received event network-vif-deleted-1e0746e7-53ea-4197-8089-36ec9df7fadb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1265.959369] env[63297]: INFO nova.compute.manager [req-e14ff6a9-93a8-4c0b-a223-182ca31c40f8 req-30af1798-b7c7-4709-bb87-83a173ed02fe service nova] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Neutron deleted interface 1e0746e7-53ea-4197-8089-36ec9df7fadb; detaching it from the instance and deleting it from the info cache [ 1265.959492] env[63297]: DEBUG nova.network.neutron [req-e14ff6a9-93a8-4c0b-a223-182ca31c40f8 req-30af1798-b7c7-4709-bb87-83a173ed02fe service nova] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1265.969218] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521075e9-090b-1baa-c509-fe6b5091fa06, 'name': SearchDatastore_Task, 'duration_secs': 0.011065} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.970058] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-064b8035-1b9f-400a-9e17-317c66036ef2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.979074] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1265.979074] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529877b7-8917-ec19-0d52-2df6b1234d87" [ 1265.979074] env[63297]: _type = "Task" [ 1265.979074] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.990335] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529877b7-8917-ec19-0d52-2df6b1234d87, 'name': SearchDatastore_Task, 'duration_secs': 0.009903} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.993307] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1265.993498] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a/42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1265.993954] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eef18bc3-f988-4dad-baa7-8684d2b13a43 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.002370] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1266.002370] env[63297]: value = "task-1697076" [ 1266.002370] env[63297]: _type = "Task" [ 1266.002370] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.017464] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697076, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.254882] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Successfully updated port: d865dcd8-f11a-4c74-8534-760e6a578524 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1266.256955] env[63297]: DEBUG nova.network.neutron [req-16db51c5-ffe4-4f27-ba76-43646ed520d8 req-f57b8335-eaf3-4042-8426-3aa9f1e28ce5 service nova] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Updated VIF entry in instance network info cache for port a0f734f5-ff87-41a5-ac99-57124d62dcda. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1266.257400] env[63297]: DEBUG nova.network.neutron [req-16db51c5-ffe4-4f27-ba76-43646ed520d8 req-f57b8335-eaf3-4042-8426-3aa9f1e28ce5 service nova] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Updating instance_info_cache with network_info: [{"id": "a0f734f5-ff87-41a5-ac99-57124d62dcda", "address": "fa:16:3e:39:4f:c6", "network": {"id": "d45a878d-62b0-4131-acb7-0d69bf7ddec8", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-835758244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a362e1c3f514beeaaf5db9823da5cc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0f734f5-ff", "ovs_interfaceid": "a0f734f5-ff87-41a5-ac99-57124d62dcda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.323198] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e6c6c9-aafc-4194-ba5e-c601debeb658 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.331910] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92590b0d-8c6c-4d6a-b748-619573befc7c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.367400] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63116782-b97e-400c-bf41-2c0ee08dd91c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.377558] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5314b213-0c37-45a8-95ae-1eb8a233fd49 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.396884] env[63297]: DEBUG nova.compute.provider_tree [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1266.424786] env[63297]: DEBUG oslo_vmware.api [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697075, 'name': RemoveSnapshot_Task, 'duration_secs': 0.53058} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.425099] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1266.425405] env[63297]: INFO nova.compute.manager [None req-82f88803-8e6b-46d4-8d84-a55eb95222b4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Took 15.64 seconds to snapshot the instance on the hypervisor. [ 1266.430494] env[63297]: DEBUG nova.network.neutron [-] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.465199] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60466583-9047-4cf3-aaec-9c8e8d4d02bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.478632] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5420de7c-ac68-4463-a3e8-20490d03082c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.520277] env[63297]: DEBUG nova.compute.manager [req-e14ff6a9-93a8-4c0b-a223-182ca31c40f8 req-30af1798-b7c7-4709-bb87-83a173ed02fe service nova] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Detach interface failed, port_id=1e0746e7-53ea-4197-8089-36ec9df7fadb, reason: Instance 8c10c573-de56-4c72-959a-65bf53b805a5 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1266.532160] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697076, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.763687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "refresh_cache-eebcad60-4b8a-4fa0-b846-b65972c4c69c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1266.763687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquired lock "refresh_cache-eebcad60-4b8a-4fa0-b846-b65972c4c69c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1266.763687] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1266.766429] env[63297]: DEBUG oslo_concurrency.lockutils [req-16db51c5-ffe4-4f27-ba76-43646ed520d8 req-f57b8335-eaf3-4042-8426-3aa9f1e28ce5 service nova] Releasing lock "refresh_cache-42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1266.900576] env[63297]: DEBUG nova.scheduler.client.report [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1266.933733] env[63297]: INFO nova.compute.manager [-] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Took 1.38 seconds to deallocate network for instance. [ 1267.032493] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697076, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.310023] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1267.407524] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.815s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1267.407807] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1267.411724] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.767s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.412050] env[63297]: DEBUG nova.objects.instance [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lazy-loading 'resources' on Instance uuid 13706c85-c23e-47cd-a7d8-2e902c11a7fb {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1267.440761] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.491792] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Updating instance_info_cache with network_info: [{"id": "d865dcd8-f11a-4c74-8534-760e6a578524", "address": "fa:16:3e:07:05:0a", "network": {"id": "d45a878d-62b0-4131-acb7-0d69bf7ddec8", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-835758244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a362e1c3f514beeaaf5db9823da5cc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd865dcd8-f1", "ovs_interfaceid": "d865dcd8-f11a-4c74-8534-760e6a578524", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1267.517370] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Acquiring lock "746742ac-8d7a-466b-8bc0-043cb5422111" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.517370] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lock "746742ac-8d7a-466b-8bc0-043cb5422111" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.517370] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Acquiring lock "746742ac-8d7a-466b-8bc0-043cb5422111-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.517854] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lock "746742ac-8d7a-466b-8bc0-043cb5422111-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.518210] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lock "746742ac-8d7a-466b-8bc0-043cb5422111-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1267.520900] env[63297]: INFO nova.compute.manager [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Terminating instance [ 1267.527123] env[63297]: DEBUG nova.compute.manager [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1267.528716] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1267.528716] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d5367c-ff22-4dbd-871b-b4f79f141460 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.535289] env[63297]: DEBUG nova.compute.manager [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Received event network-vif-plugged-d865dcd8-f11a-4c74-8534-760e6a578524 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1267.535645] env[63297]: DEBUG oslo_concurrency.lockutils [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] Acquiring lock "eebcad60-4b8a-4fa0-b846-b65972c4c69c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.536360] env[63297]: DEBUG oslo_concurrency.lockutils [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] Lock "eebcad60-4b8a-4fa0-b846-b65972c4c69c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.536769] env[63297]: DEBUG oslo_concurrency.lockutils [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] Lock "eebcad60-4b8a-4fa0-b846-b65972c4c69c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1267.537087] env[63297]: DEBUG nova.compute.manager [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] No waiting events found dispatching network-vif-plugged-d865dcd8-f11a-4c74-8534-760e6a578524 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1267.537355] env[63297]: WARNING nova.compute.manager [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Received unexpected event network-vif-plugged-d865dcd8-f11a-4c74-8534-760e6a578524 for instance with vm_state building and task_state spawning. 
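[annotation, not part of the captured log] The repeated "Acquiring lock ... by ...", "Lock ... acquired by ... :: waited", and "Lock ... released by ... :: held" triplets above (source: oslo_concurrency/lockutils.py inner wrapper, lines 402/407/421) are emitted by oslo.concurrency's lock helpers around Nova's critical sections. A minimal sketch of the same pattern, assuming only the public lockutils API; the example function name is hypothetical, while the lock names are taken from the log:

    # Sketch: reproduces the acquire/wait/hold logging pattern seen above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('eebcad60-4b8a-4fa0-b846-b65972c4c69c-events')
    def _pop_event_example():
        # Runs only while the per-instance "-events" lock is held; the
        # wrapper logs how long the caller waited and held the lock.
        return None

    # Equivalent explicit form with the context manager, as used around
    # the resource tracker's "compute_resources" sections in the log:
    with lockutils.lock('compute_resources'):
        pass  # critical section

Whether the decorator or the context manager is used, the wait/held timings in the log come from the same wrapper, which is why every lock message names the function that requested it.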
[ 1267.537680] env[63297]: DEBUG nova.compute.manager [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Received event network-changed-d865dcd8-f11a-4c74-8534-760e6a578524 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1267.537926] env[63297]: DEBUG nova.compute.manager [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Refreshing instance network info cache due to event network-changed-d865dcd8-f11a-4c74-8534-760e6a578524. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1267.538343] env[63297]: DEBUG oslo_concurrency.lockutils [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] Acquiring lock "refresh_cache-eebcad60-4b8a-4fa0-b846-b65972c4c69c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1267.544987] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697076, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.545447] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1267.545826] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c00a1263-356c-4ef7-b125-669a1c15943a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.558043] env[63297]: DEBUG oslo_vmware.api [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1267.558043] env[63297]: value = "task-1697077" [ 1267.558043] env[63297]: _type = "Task" [ 1267.558043] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.568744] env[63297]: DEBUG oslo_vmware.api [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697077, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.913598] env[63297]: DEBUG nova.compute.utils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1267.915209] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1267.915418] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1267.998142] env[63297]: DEBUG nova.policy [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38504ea2d496450ca516b1e42b84566d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a362e1c3f514beeaaf5db9823da5cc3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1268.000056] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Releasing lock "refresh_cache-eebcad60-4b8a-4fa0-b846-b65972c4c69c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1268.000056] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Instance network_info: |[{"id": "d865dcd8-f11a-4c74-8534-760e6a578524", "address": "fa:16:3e:07:05:0a", "network": {"id": "d45a878d-62b0-4131-acb7-0d69bf7ddec8", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-835758244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a362e1c3f514beeaaf5db9823da5cc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd865dcd8-f1", "ovs_interfaceid": "d865dcd8-f11a-4c74-8534-760e6a578524", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1268.000359] env[63297]: DEBUG oslo_concurrency.lockutils [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] Acquired lock "refresh_cache-eebcad60-4b8a-4fa0-b846-b65972c4c69c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.000540] env[63297]: DEBUG nova.network.neutron 
[req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Refreshing network info cache for port d865dcd8-f11a-4c74-8534-760e6a578524 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1268.001915] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:05:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f678cd81-6d15-43d5-aab7-d7eedc2ef2d5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd865dcd8-f11a-4c74-8534-760e6a578524', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1268.011942] env[63297]: DEBUG oslo.service.loopingcall [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1268.015153] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1268.016602] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4f770ae-2b26-41d7-8b51-c138517976c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.045045] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697076, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.582691} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.051119] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a/42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1268.051371] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1268.051630] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1268.051630] env[63297]: value = "task-1697078" [ 1268.051630] env[63297]: _type = "Task" [ 1268.051630] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.052113] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-275dca29-947f-4094-9f8f-ca7b413e8ea1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.070756] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1268.070756] env[63297]: value = "task-1697079" [ 1268.070756] env[63297]: _type = "Task" [ 1268.070756] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.071824] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697078, 'name': CreateVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.079461] env[63297]: DEBUG oslo_vmware.api [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697077, 'name': PowerOffVM_Task, 'duration_secs': 0.299169} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.080472] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1268.080662] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1268.081044] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b01c0296-8d50-4f9d-839b-21e5eae0d1cb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.087162] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697079, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.361099] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Successfully created port: cf08728a-cae2-4f43-af9e-94a167d2750a {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1268.419602] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1268.486792] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0157e6-b470-424b-8635-6561e2c07df2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.495552] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b90350a-469d-4d89-9f8c-9bee32471408 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.535191] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30071533-bb37-44ac-bd1e-e009ef11274f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.543249] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1268.543490] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1268.547204] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Deleting the datastore file [datastore1] 746742ac-8d7a-466b-8bc0-043cb5422111 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1268.547789] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc2cc358-e8a5-44c0-aedc-731136250f94 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.551213] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fdbc62-bed7-4681-b509-afdf468e4f7a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.565463] env[63297]: DEBUG oslo_vmware.api [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 
tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for the task: (returnval){ [ 1268.565463] env[63297]: value = "task-1697081" [ 1268.565463] env[63297]: _type = "Task" [ 1268.565463] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.577253] env[63297]: DEBUG nova.compute.provider_tree [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.596498] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697078, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.604739] env[63297]: DEBUG oslo_vmware.api [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697081, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.608340] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697079, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.246052} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.608708] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1268.609792] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbe7d1b-4962-4cde-8882-a3125c3d8f59 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.641029] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a/42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1268.641029] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fba543f8-464e-4e39-ba97-f16e010e2be4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.663820] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1268.663820] env[63297]: value = "task-1697082" [ 1268.663820] env[63297]: _type = "Task" [ 1268.663820] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.674798] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697082, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.841909] env[63297]: DEBUG nova.network.neutron [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Updated VIF entry in instance network info cache for port d865dcd8-f11a-4c74-8534-760e6a578524. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1268.842372] env[63297]: DEBUG nova.network.neutron [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Updating instance_info_cache with network_info: [{"id": "d865dcd8-f11a-4c74-8534-760e6a578524", "address": "fa:16:3e:07:05:0a", "network": {"id": "d45a878d-62b0-4131-acb7-0d69bf7ddec8", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-835758244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a362e1c3f514beeaaf5db9823da5cc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd865dcd8-f1", "ovs_interfaceid": "d865dcd8-f11a-4c74-8534-760e6a578524", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.068077] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697078, 'name': CreateVM_Task, 'duration_secs': 0.610197} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.068226] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1269.068940] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1269.069134] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.069458] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1269.069743] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-336531f5-dc97-4f05-a302-989f9fe50480 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.075775] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1269.075775] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ba3000-4563-d1c4-1e9b-58b5a1767b09" [ 1269.075775] env[63297]: _type = "Task" [ 1269.075775] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.089031] env[63297]: DEBUG oslo_vmware.api [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Task: {'id': task-1697081, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173213} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.092846] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1269.094020] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1269.094020] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1269.094020] env[63297]: INFO nova.compute.manager [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Took 1.57 seconds to destroy the instance on the hypervisor. [ 1269.094020] env[63297]: DEBUG oslo.service.loopingcall [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1269.094215] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ba3000-4563-d1c4-1e9b-58b5a1767b09, 'name': SearchDatastore_Task, 'duration_secs': 0.011435} completed successfully.
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.094409] env[63297]: DEBUG nova.compute.manager [-] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1269.094565] env[63297]: DEBUG nova.network.neutron [-] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1269.096244] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1269.096487] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1269.096737] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1269.096885] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.097090] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1269.097978] env[63297]: DEBUG nova.scheduler.client.report [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1269.101358] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-284e8d05-fe80-4d25-a867-663534bcd861 {{(pid=63297) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.114196] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1269.114346] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1269.116026] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ba2960c-577a-490b-ab42-6c553c15b65d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.123187] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1269.123187] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526490e8-88ff-3415-6f3d-e8e0d9877cd9" [ 1269.123187] env[63297]: _type = "Task" [ 1269.123187] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.132835] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526490e8-88ff-3415-6f3d-e8e0d9877cd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.175478] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697082, 'name': ReconfigVM_Task, 'duration_secs': 0.342297} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.175810] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a/42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1269.176512] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-136e472f-c40a-46fe-ad87-68ea56bff184 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.184397] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1269.184397] env[63297]: value = "task-1697083" [ 1269.184397] env[63297]: _type = "Task" [ 1269.184397] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.194399] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697083, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.345606] env[63297]: DEBUG oslo_concurrency.lockutils [req-6616f8cd-db5c-4c2c-bd6e-c7f57d2061ce req-86cd979e-36e9-4132-b035-52ee1174d0eb service nova] Releasing lock "refresh_cache-eebcad60-4b8a-4fa0-b846-b65972c4c69c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1269.435965] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1269.459388] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1269.459633] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1269.459792] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1269.459974] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1269.460139] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1269.460289] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1269.460500] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1269.460659] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1269.460823] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1269.460984] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1269.461207] env[63297]: DEBUG nova.virt.hardware [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1269.462162] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3162431f-d226-44f8-bcc7-864adeced734 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.471752] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4ebe7d-450c-4e52-ad59-cc0b3b790807 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.566609] env[63297]: DEBUG nova.compute.manager [req-6edeb6ec-239a-4781-87ef-47f261b6f2a7 req-2753b0ca-3c2b-4825-ba32-64f11b3a4305 service nova] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Received event network-vif-deleted-e756a23f-dc86-4ac9-b42f-47196093abd3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1269.566609] env[63297]: INFO nova.compute.manager [req-6edeb6ec-239a-4781-87ef-47f261b6f2a7 req-2753b0ca-3c2b-4825-ba32-64f11b3a4305 service nova] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Neutron deleted interface e756a23f-dc86-4ac9-b42f-47196093abd3; detaching it from the instance and deleting it from the info cache [ 1269.566609] env[63297]: DEBUG nova.network.neutron [req-6edeb6ec-239a-4781-87ef-47f261b6f2a7 req-2753b0ca-3c2b-4825-ba32-64f11b3a4305 service nova] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.605178] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.193s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1269.611028] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.571s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1269.611147] env[63297]: INFO nova.compute.claims [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1269.631139] env[63297]: INFO nova.scheduler.client.report [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Deleted allocations for instance 13706c85-c23e-47cd-a7d8-2e902c11a7fb [ 1269.636054] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526490e8-88ff-3415-6f3d-e8e0d9877cd9, 'name': SearchDatastore_Task, 'duration_secs': 0.011987} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.639709] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec8985e0-d199-4783-a8c9-b84233cadc77 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.647629] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1269.647629] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520b8e78-8caa-48b6-ffd8-9d95024c862c" [ 1269.647629] env[63297]: _type = "Task" [ 1269.647629] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.659220] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520b8e78-8caa-48b6-ffd8-9d95024c862c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.695433] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697083, 'name': Rename_Task, 'duration_secs': 0.16846} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.695737] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1269.695990] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8409fd82-290b-4d63-bfad-539ccf1a660e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.703502] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1269.703502] env[63297]: value = "task-1697084" [ 1269.703502] env[63297]: _type = "Task" [ 1269.703502] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.714350] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697084, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.866231] env[63297]: DEBUG nova.network.neutron [-] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.069509] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d36e3e2-13c1-4b85-bdaa-eb5c528e3f5e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.080406] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a70b72f-8915-46bc-9ac5-9dabe52b2141 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.119452] env[63297]: DEBUG nova.compute.manager [req-6edeb6ec-239a-4781-87ef-47f261b6f2a7 req-2753b0ca-3c2b-4825-ba32-64f11b3a4305 service nova] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Detach interface failed, port_id=e756a23f-dc86-4ac9-b42f-47196093abd3, reason: Instance 746742ac-8d7a-466b-8bc0-043cb5422111 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1270.138378] env[63297]: DEBUG nova.compute.manager [req-21c2c0ce-5492-4a07-8406-d8671b2dfe92 req-0ce9dea3-8d54-4c12-9c65-5de8a0b32b7b service nova] [instance: 92439795-6240-4103-940b-de6d87738570] Received event network-vif-plugged-cf08728a-cae2-4f43-af9e-94a167d2750a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1270.138605] env[63297]: DEBUG oslo_concurrency.lockutils [req-21c2c0ce-5492-4a07-8406-d8671b2dfe92 req-0ce9dea3-8d54-4c12-9c65-5de8a0b32b7b service nova] Acquiring lock "92439795-6240-4103-940b-de6d87738570-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1270.138864] env[63297]: DEBUG oslo_concurrency.lockutils [req-21c2c0ce-5492-4a07-8406-d8671b2dfe92 req-0ce9dea3-8d54-4c12-9c65-5de8a0b32b7b service nova] Lock "92439795-6240-4103-940b-de6d87738570-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.139359] env[63297]: DEBUG oslo_concurrency.lockutils [req-21c2c0ce-5492-4a07-8406-d8671b2dfe92 req-0ce9dea3-8d54-4c12-9c65-5de8a0b32b7b service nova] Lock "92439795-6240-4103-940b-de6d87738570-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1270.139557] env[63297]: DEBUG nova.compute.manager [req-21c2c0ce-5492-4a07-8406-d8671b2dfe92 req-0ce9dea3-8d54-4c12-9c65-5de8a0b32b7b service nova] [instance: 92439795-6240-4103-940b-de6d87738570] No waiting events found dispatching network-vif-plugged-cf08728a-cae2-4f43-af9e-94a167d2750a {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1270.139722] env[63297]: WARNING nova.compute.manager [req-21c2c0ce-5492-4a07-8406-d8671b2dfe92 req-0ce9dea3-8d54-4c12-9c65-5de8a0b32b7b service nova] [instance: 92439795-6240-4103-940b-de6d87738570] Received unexpected event network-vif-plugged-cf08728a-cae2-4f43-af9e-94a167d2750a for instance with vm_state building and task_state spawning. [ 1270.146566] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0014cd40-3272-460d-b369-09c1707e3c7e tempest-ServersAdmin275Test-1607202124 tempest-ServersAdmin275Test-1607202124-project-member] Lock "13706c85-c23e-47cd-a7d8-2e902c11a7fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 35.361s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1270.161712] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520b8e78-8caa-48b6-ffd8-9d95024c862c, 'name': SearchDatastore_Task, 'duration_secs': 0.01171} completed successfully.
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.161844] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1270.162089] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] eebcad60-4b8a-4fa0-b846-b65972c4c69c/eebcad60-4b8a-4fa0-b846-b65972c4c69c.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1270.162819] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-273f3078-59a9-4190-b48c-a99bd7277869 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.173133] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1270.173133] env[63297]: value = "task-1697085" [ 1270.173133] env[63297]: _type = "Task" [ 1270.173133] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.186290] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697085, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.215989] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697084, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.290766] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Successfully updated port: cf08728a-cae2-4f43-af9e-94a167d2750a {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1270.369901] env[63297]: INFO nova.compute.manager [-] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Took 1.27 seconds to deallocate network for instance. [ 1270.690098] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697085, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.724455] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697084, 'name': PowerOnVM_Task, 'duration_secs': 0.631859} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.727473] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1270.728277] env[63297]: INFO nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Took 7.80 seconds to spawn the instance on the hypervisor. [ 1270.728585] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1270.729961] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10106e7d-7ae7-4abe-b0fd-509c07b4ea15 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.794640] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "refresh_cache-92439795-6240-4103-940b-de6d87738570" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1270.794818] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquired lock "refresh_cache-92439795-6240-4103-940b-de6d87738570" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.795837] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1270.882977] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.162864] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-333d34c6-0e23-41ae-a1e0-5f8d0d232ed2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.171582] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7b7796-402d-4e80-96ef-3a615664165c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.183866] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697085, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67299} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.207896] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] eebcad60-4b8a-4fa0-b846-b65972c4c69c/eebcad60-4b8a-4fa0-b846-b65972c4c69c.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1271.207896] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1271.208321] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d32b5ff1-ba29-4a8d-bf73-8f586f1a469e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.210883] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44aa2aba-6b05-4f54-9a9e-7d127df9dd10 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.220444] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28350542-805e-414f-8866-cbe779c84710 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.224495] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1271.224495] env[63297]: value = "task-1697086" [ 1271.224495] env[63297]: _type = "Task" [ 1271.224495] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.237833] env[63297]: DEBUG nova.compute.provider_tree [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.251010] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697086, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.254570] env[63297]: INFO nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Took 45.06 seconds to build instance. [ 1271.350816] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1271.629222] env[63297]: DEBUG nova.network.neutron [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Updating instance_info_cache with network_info: [{"id": "cf08728a-cae2-4f43-af9e-94a167d2750a", "address": "fa:16:3e:bd:fc:ef", "network": {"id": "d45a878d-62b0-4131-acb7-0d69bf7ddec8", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-835758244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a362e1c3f514beeaaf5db9823da5cc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf08728a-ca", "ovs_interfaceid": "cf08728a-cae2-4f43-af9e-94a167d2750a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.735998] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099627} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.736331] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1271.737714] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b0e549-a339-493d-b0d1-bc924a0aeaeb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.741017] env[63297]: DEBUG nova.scheduler.client.report [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1271.757883] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 54.375s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.766923] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] eebcad60-4b8a-4fa0-b846-b65972c4c69c/eebcad60-4b8a-4fa0-b846-b65972c4c69c.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1271.772228] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7789367d-44be-46e5-b9d2-7f38844a1186 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.792163] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1271.792163] env[63297]: value = "task-1697087" [ 1271.792163] env[63297]: _type = "Task" [ 1271.792163] env[63297]: } to complete.
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.801470] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697087, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.135025] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Releasing lock "refresh_cache-92439795-6240-4103-940b-de6d87738570" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1272.135025] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Instance network_info: |[{"id": "cf08728a-cae2-4f43-af9e-94a167d2750a", "address": "fa:16:3e:bd:fc:ef", "network": {"id": "d45a878d-62b0-4131-acb7-0d69bf7ddec8", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-835758244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a362e1c3f514beeaaf5db9823da5cc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf08728a-ca", "ovs_interfaceid": "cf08728a-cae2-4f43-af9e-94a167d2750a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1272.135025] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:fc:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f678cd81-6d15-43d5-aab7-d7eedc2ef2d5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf08728a-cae2-4f43-af9e-94a167d2750a', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1272.144729] env[63297]: DEBUG oslo.service.loopingcall [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1272.145173] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92439795-6240-4103-940b-de6d87738570] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1272.145946] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc4e73a3-a95b-41e6-b3ed-edb4db0787e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.174672] env[63297]: DEBUG nova.compute.manager [req-a3937e08-ec8f-444f-9e36-78ca1ab3299d req-dfe266ab-431d-49ed-b5a1-0801deb3ff9e service nova] [instance: 92439795-6240-4103-940b-de6d87738570] Received event network-changed-cf08728a-cae2-4f43-af9e-94a167d2750a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1272.174672] env[63297]: DEBUG nova.compute.manager [req-a3937e08-ec8f-444f-9e36-78ca1ab3299d req-dfe266ab-431d-49ed-b5a1-0801deb3ff9e service nova] [instance: 92439795-6240-4103-940b-de6d87738570] Refreshing instance network info cache due to event network-changed-cf08728a-cae2-4f43-af9e-94a167d2750a. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1272.174672] env[63297]: DEBUG oslo_concurrency.lockutils [req-a3937e08-ec8f-444f-9e36-78ca1ab3299d req-dfe266ab-431d-49ed-b5a1-0801deb3ff9e service nova] Acquiring lock "refresh_cache-92439795-6240-4103-940b-de6d87738570" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1272.174672] env[63297]: DEBUG oslo_concurrency.lockutils [req-a3937e08-ec8f-444f-9e36-78ca1ab3299d req-dfe266ab-431d-49ed-b5a1-0801deb3ff9e service nova] Acquired lock "refresh_cache-92439795-6240-4103-940b-de6d87738570" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.174672] env[63297]: DEBUG nova.network.neutron [req-a3937e08-ec8f-444f-9e36-78ca1ab3299d req-dfe266ab-431d-49ed-b5a1-0801deb3ff9e service nova] [instance: 92439795-6240-4103-940b-de6d87738570] Refreshing network info cache for port cf08728a-cae2-4f43-af9e-94a167d2750a {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1272.176871] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1272.176871] env[63297]: value = "task-1697088" [ 1272.176871] env[63297]: _type = "Task" [ 1272.176871] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.188060] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697088, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.247082] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.639s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.247693] env[63297]: DEBUG nova.compute.manager [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1272.251509] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.730s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.253040] env[63297]: INFO nova.compute.claims [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1272.270739] env[63297]: DEBUG nova.compute.manager [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1272.306603] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697087, 'name': ReconfigVM_Task, 'duration_secs': 0.324648} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.308470] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Reconfigured VM instance instance-0000001b to attach disk [datastore1] eebcad60-4b8a-4fa0-b846-b65972c4c69c/eebcad60-4b8a-4fa0-b846-b65972c4c69c.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1272.309670] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8fdc9047-0190-485f-bc78-fdf3f4468d54 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.318755] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1272.318755] env[63297]: value = "task-1697089" [ 1272.318755] env[63297]: _type = "Task" [ 1272.318755] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.331773] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697089, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.695673] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697088, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.758028] env[63297]: DEBUG nova.compute.utils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1272.761552] env[63297]: DEBUG nova.compute.manager [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1272.761728] env[63297]: DEBUG nova.network.neutron [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1272.795925] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.829763] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697089, 'name': Rename_Task, 'duration_secs': 0.160922} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.830077] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1272.830337] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00d4bc7e-7831-4634-bcce-412ca21fe1c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.838176] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1272.838176] env[63297]: value = "task-1697090" [ 1272.838176] env[63297]: _type = "Task" [ 1272.838176] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.847293] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697090, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.850651] env[63297]: DEBUG nova.policy [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54247dac79134e97952bbfe10d02115d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '06d0bb56c7cc4beb9e53cc65fd6063b0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1272.932269] env[63297]: DEBUG nova.network.neutron [req-a3937e08-ec8f-444f-9e36-78ca1ab3299d req-dfe266ab-431d-49ed-b5a1-0801deb3ff9e service nova] [instance: 92439795-6240-4103-940b-de6d87738570] Updated VIF entry in instance network info cache for port cf08728a-cae2-4f43-af9e-94a167d2750a. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1272.932723] env[63297]: DEBUG nova.network.neutron [req-a3937e08-ec8f-444f-9e36-78ca1ab3299d req-dfe266ab-431d-49ed-b5a1-0801deb3ff9e service nova] [instance: 92439795-6240-4103-940b-de6d87738570] Updating instance_info_cache with network_info: [{"id": "cf08728a-cae2-4f43-af9e-94a167d2750a", "address": "fa:16:3e:bd:fc:ef", "network": {"id": "d45a878d-62b0-4131-acb7-0d69bf7ddec8", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-835758244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a362e1c3f514beeaaf5db9823da5cc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf08728a-ca", "ovs_interfaceid": "cf08728a-cae2-4f43-af9e-94a167d2750a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1273.191679] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697088, 'name': CreateVM_Task, 'duration_secs': 0.525168} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.191852] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92439795-6240-4103-940b-de6d87738570] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1273.192582] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1273.192765] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.193179] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1273.193430] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e77f3a06-8170-46f4-869f-9c73c825b164 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.200392] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1273.200392] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522eed15-c7da-8601-f0f1-33a272645532" [ 1273.200392] env[63297]: _type = "Task" [ 1273.200392] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.210583] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522eed15-c7da-8601-f0f1-33a272645532, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.262209] env[63297]: DEBUG nova.compute.manager [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1273.353598] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697090, 'name': PowerOnVM_Task, 'duration_secs': 0.478691} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.353999] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1273.354237] env[63297]: INFO nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Took 7.76 seconds to spawn the instance on the hypervisor. [ 1273.354411] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1273.356783] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4c34be-44c1-49ec-bfc2-f65f5384e90c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.439296] env[63297]: DEBUG oslo_concurrency.lockutils [req-a3937e08-ec8f-444f-9e36-78ca1ab3299d req-dfe266ab-431d-49ed-b5a1-0801deb3ff9e service nova] Releasing lock "refresh_cache-92439795-6240-4103-940b-de6d87738570" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1273.523384] env[63297]: DEBUG nova.network.neutron [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Successfully created port: 6b15b0b7-1c3f-4da4-80bd-92d9d77975cd {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1273.712664] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522eed15-c7da-8601-f0f1-33a272645532, 'name': SearchDatastore_Task, 'duration_secs': 0.018493} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.715471] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1273.715859] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1273.716138] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1273.716290] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.716472] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1273.721075] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9922a29d-be24-47e0-9bcc-e5f3993737b9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.734064] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1273.734064] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1273.735024] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-621a7df6-7978-41a3-b584-1d8f47fd1075 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.744214] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1273.744214] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dda417-07d1-f841-92e2-da94ffade945" [ 1273.744214] env[63297]: _type = "Task" [ 1273.744214] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.756729] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dda417-07d1-f841-92e2-da94ffade945, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.885043] env[63297]: INFO nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Took 41.61 seconds to build instance. [ 1273.942775] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6a017a-3564-481a-996c-9058425398b3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.954678] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f649af6d-4b05-4ac4-af14-a16efff776a2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.998402] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4f4430-1bd6-43f5-a8bb-9da1ab35cdfd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.012817] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54536927-150c-42a5-a6b0-0a821fd86d8a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.028283] env[63297]: DEBUG nova.compute.provider_tree [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1274.042204] env[63297]: DEBUG oslo_vmware.rw_handles [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ffe1d9-b4cd-8612-92ea-b81245f985d7/disk-0.vmdk. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1274.043378] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e3ce9f-6066-480a-9b53-8d9153bca8f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.050624] env[63297]: DEBUG oslo_vmware.rw_handles [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ffe1d9-b4cd-8612-92ea-b81245f985d7/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1274.050813] env[63297]: ERROR oslo_vmware.rw_handles [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ffe1d9-b4cd-8612-92ea-b81245f985d7/disk-0.vmdk due to incomplete transfer. [ 1274.051069] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c2b59711-a772-4402-9523-b7b7a240ceb2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.061765] env[63297]: DEBUG oslo_vmware.rw_handles [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ffe1d9-b4cd-8612-92ea-b81245f985d7/disk-0.vmdk. {{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1274.061972] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Uploaded image 053112fe-4ce6-4f19-9eda-3789f6f82242 to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1274.064268] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1274.064541] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-39a81aff-832a-4ca4-965b-03cdb034afd8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.074171] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1274.074171] env[63297]: value = "task-1697091" [ 1274.074171] env[63297]: _type = "Task" [ 1274.074171] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.085845] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697091, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.256020] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dda417-07d1-f841-92e2-da94ffade945, 'name': SearchDatastore_Task, 'duration_secs': 0.018952} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.256020] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53df5185-8dcb-4db9-8cf0-fae7a910a7eb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.262670] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1274.262670] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52239e8e-ab2a-9aa6-494d-9082dd0b9386" [ 1274.262670] env[63297]: _type = "Task" [ 1274.262670] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.275584] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52239e8e-ab2a-9aa6-494d-9082dd0b9386, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.278846] env[63297]: DEBUG nova.compute.manager [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1274.316458] env[63297]: DEBUG nova.virt.hardware [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1274.317502] env[63297]: DEBUG nova.virt.hardware [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1274.317502] env[63297]: DEBUG nova.virt.hardware [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1274.317502] env[63297]: DEBUG nova.virt.hardware [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1274.317873] env[63297]: DEBUG nova.virt.hardware [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1274.318149] env[63297]: DEBUG nova.virt.hardware [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1274.318480] env[63297]: DEBUG nova.virt.hardware [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1274.318840] env[63297]: DEBUG nova.virt.hardware [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 
tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1274.319164] env[63297]: DEBUG nova.virt.hardware [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1274.320046] env[63297]: DEBUG nova.virt.hardware [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1274.320046] env[63297]: DEBUG nova.virt.hardware [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1274.320678] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f16988a-4bd3-4bca-a3c3-05fd0631a153 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.330607] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2782029c-c16e-4173-ab40-df3f9c8150ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.388587] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "eebcad60-4b8a-4fa0-b846-b65972c4c69c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.941s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.538471] env[63297]: DEBUG nova.scheduler.client.report [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1274.585635] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697091, 'name': Destroy_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.773852] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52239e8e-ab2a-9aa6-494d-9082dd0b9386, 'name': SearchDatastore_Task, 'duration_secs': 0.047989} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.774249] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1274.774627] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 92439795-6240-4103-940b-de6d87738570/92439795-6240-4103-940b-de6d87738570.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1274.775062] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76c94ffd-20a4-45e2-b9a8-9e0e044fe7b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.784272] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1274.784272] env[63297]: value = "task-1697092" [ 1274.784272] env[63297]: _type = "Task" [ 1274.784272] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.793043] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.890292] env[63297]: DEBUG nova.compute.manager [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1275.048171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.794s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1275.048171] env[63297]: DEBUG nova.compute.manager [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1275.048943] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.930s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.050458] env[63297]: INFO nova.compute.claims [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1275.091628] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697091, 'name': Destroy_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.295773] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.421978] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.556136] env[63297]: DEBUG nova.compute.utils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1275.561197] env[63297]: DEBUG nova.compute.manager [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1275.561197] env[63297]: DEBUG nova.network.neutron [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1275.596719] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697091, 'name': Destroy_Task, 'duration_secs': 1.378491} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.597035] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Destroyed the VM [ 1275.597286] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1275.597548] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-597fddfe-7405-4688-9e88-02e223ad454d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.606448] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1275.606448] env[63297]: value = "task-1697093" [ 1275.606448] env[63297]: _type = "Task" [ 1275.606448] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.622429] env[63297]: DEBUG nova.policy [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc34dd0686ef4ea080e597d24f0702d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b519161d82f9457eaebe7ea8a533b454', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1275.623939] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697093, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.772325] env[63297]: DEBUG nova.compute.manager [req-685d55e3-8c8b-4adb-9d33-16938e2996f4 req-bfe680c0-6f36-44d6-b274-24373f9141b6 service nova] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Received event network-vif-plugged-6b15b0b7-1c3f-4da4-80bd-92d9d77975cd {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1275.772325] env[63297]: DEBUG oslo_concurrency.lockutils [req-685d55e3-8c8b-4adb-9d33-16938e2996f4 req-bfe680c0-6f36-44d6-b274-24373f9141b6 service nova] Acquiring lock "87fa97a7-a8a5-4184-b52a-b02ad5468127-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.772325] env[63297]: DEBUG oslo_concurrency.lockutils [req-685d55e3-8c8b-4adb-9d33-16938e2996f4 req-bfe680c0-6f36-44d6-b274-24373f9141b6 service nova] Lock "87fa97a7-a8a5-4184-b52a-b02ad5468127-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.772325] env[63297]: DEBUG oslo_concurrency.lockutils [req-685d55e3-8c8b-4adb-9d33-16938e2996f4 req-bfe680c0-6f36-44d6-b274-24373f9141b6 service nova] Lock "87fa97a7-a8a5-4184-b52a-b02ad5468127-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1275.772325] env[63297]: DEBUG nova.compute.manager [req-685d55e3-8c8b-4adb-9d33-16938e2996f4 req-bfe680c0-6f36-44d6-b274-24373f9141b6 service nova] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] No waiting events found dispatching network-vif-plugged-6b15b0b7-1c3f-4da4-80bd-92d9d77975cd {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1275.772325] env[63297]: WARNING nova.compute.manager [req-685d55e3-8c8b-4adb-9d33-16938e2996f4 req-bfe680c0-6f36-44d6-b274-24373f9141b6 service nova] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Received unexpected event network-vif-plugged-6b15b0b7-1c3f-4da4-80bd-92d9d77975cd for instance with vm_state building and task_state spawning. [ 1275.802176] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697092, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.981988} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.802707] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 92439795-6240-4103-940b-de6d87738570/92439795-6240-4103-940b-de6d87738570.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1275.803031] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1275.803385] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-abd7475d-a688-42ea-9f1e-d68a7d356ac1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.812706] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1275.812706] env[63297]: value = "task-1697094" [ 1275.812706] env[63297]: _type = "Task" [ 1275.812706] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.825463] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697094, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.944583] env[63297]: DEBUG nova.network.neutron [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Successfully updated port: 6b15b0b7-1c3f-4da4-80bd-92d9d77975cd {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1276.062260] env[63297]: DEBUG nova.compute.manager [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1276.126190] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697093, 'name': RemoveSnapshot_Task} progress is 12%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.126803] env[63297]: DEBUG nova.network.neutron [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Successfully created port: 9e58a5c1-8922-4504-a29b-1b53dd0ff360 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1276.325080] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697094, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093932} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.325530] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1276.326431] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb446e1-91f9-4fe8-94ee-64d98dcb026d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.351369] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 92439795-6240-4103-940b-de6d87738570/92439795-6240-4103-940b-de6d87738570.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1276.355175] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3e6d241-6c89-4405-952c-3ae2261ed536 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.380163] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1276.380163] env[63297]: value = "task-1697095" [ 1276.380163] env[63297]: _type = "Task" [ 1276.380163] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.392407] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697095, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.448991] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Acquiring lock "refresh_cache-87fa97a7-a8a5-4184-b52a-b02ad5468127" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.449197] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Acquired lock "refresh_cache-87fa97a7-a8a5-4184-b52a-b02ad5468127" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.449403] env[63297]: DEBUG nova.network.neutron [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1276.612518] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27398873-e2f0-4bad-983c-ec8e19d3fa3a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.628156] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f4fa57-32ac-46ac-8af5-2e2e6b07274c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.631422] env[63297]: DEBUG oslo_vmware.api [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697093, 'name': RemoveSnapshot_Task, 'duration_secs': 0.647034} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.632019] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1276.632278] env[63297]: INFO nova.compute.manager [None req-1da94cc9-3739-4255-83d4-4391da6a528f tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Took 15.73 seconds to snapshot the instance on the hypervisor. 
[ 1276.667399] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16fe066-4096-4d8f-a3d6-11dc3143a2ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.678766] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0686a8ae-9a87-401b-833a-ab50be85757e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.694413] env[63297]: DEBUG nova.compute.provider_tree [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.889422] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697095, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.991642] env[63297]: DEBUG nova.network.neutron [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1277.079687] env[63297]: DEBUG nova.compute.manager [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1277.122253] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "5124f7fb-1293-4964-98c4-426ecfce7d10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.122558] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.136154] env[63297]: DEBUG nova.virt.hardware [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1277.136154] env[63297]: DEBUG nova.virt.hardware [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1277.136154] env[63297]: DEBUG nova.virt.hardware [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1277.136154] env[63297]: DEBUG nova.virt.hardware [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1277.136154] env[63297]: DEBUG nova.virt.hardware [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1277.136154] env[63297]: DEBUG nova.virt.hardware [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d 
tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1277.136154] env[63297]: DEBUG nova.virt.hardware [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1277.136154] env[63297]: DEBUG nova.virt.hardware [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1277.136154] env[63297]: DEBUG nova.virt.hardware [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1277.136154] env[63297]: DEBUG nova.virt.hardware [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1277.136154] env[63297]: DEBUG nova.virt.hardware [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1277.139105] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fdc304-2a8c-4faf-ab0a-64e34946f915 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.153656] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3c7c61-aa1a-4c81-9378-cddcac78254f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.182225] env[63297]: DEBUG nova.network.neutron [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Updating instance_info_cache with network_info: [{"id": "6b15b0b7-1c3f-4da4-80bd-92d9d77975cd", "address": "fa:16:3e:6f:7d:a0", "network": {"id": "66931031-1baf-4534-be81-290d082814ea", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-839785853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06d0bb56c7cc4beb9e53cc65fd6063b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b15b0b7-1c", "ovs_interfaceid": "6b15b0b7-1c3f-4da4-80bd-92d9d77975cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.197271] env[63297]: DEBUG nova.scheduler.client.report [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1277.390769] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697095, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.685169] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Releasing lock "refresh_cache-87fa97a7-a8a5-4184-b52a-b02ad5468127" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1277.685525] env[63297]: DEBUG nova.compute.manager [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Instance network_info: |[{"id": "6b15b0b7-1c3f-4da4-80bd-92d9d77975cd", "address": "fa:16:3e:6f:7d:a0", "network": {"id": "66931031-1baf-4534-be81-290d082814ea", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-839785853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06d0bb56c7cc4beb9e53cc65fd6063b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b15b0b7-1c", "ovs_interfaceid": "6b15b0b7-1c3f-4da4-80bd-92d9d77975cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1277.685953] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:7d:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b94712a6-b777-47dd-bc06-f9acfce2d936', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b15b0b7-1c3f-4da4-80bd-92d9d77975cd', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1277.694621] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Creating folder: Project (06d0bb56c7cc4beb9e53cc65fd6063b0). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1277.694917] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81009c77-51cb-4457-a0e5-298da1e34833 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.701976] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.702378] env[63297]: DEBUG nova.compute.manager [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1277.705586] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.901s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.705685] env[63297]: DEBUG nova.objects.instance [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Lazy-loading 'resources' on Instance uuid 459d5a17-182b-4284-b464-57d342981031 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1277.710098] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Created folder: Project (06d0bb56c7cc4beb9e53cc65fd6063b0) in parent group-v353718. [ 1277.711204] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Creating folder: Instances. Parent ref: group-v353808. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1277.711204] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67aecec7-780e-42ff-8019-db6c0326ffc1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.720799] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Created folder: Instances in parent group-v353808. [ 1277.721564] env[63297]: DEBUG oslo.service.loopingcall [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1277.721783] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1277.722013] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd46969a-4cb2-4fe7-a878-cbdc80bf05a8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.744403] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1277.744403] env[63297]: value = "task-1697098" [ 1277.744403] env[63297]: _type = "Task" [ 1277.744403] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.755573] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697098, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.887584] env[63297]: DEBUG nova.compute.manager [req-b2e31e2b-5441-470f-ad9c-7121d90a7364 req-1cdade58-30bb-4c89-885b-fc003379fc5f service nova] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Received event network-changed-6b15b0b7-1c3f-4da4-80bd-92d9d77975cd {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1277.887818] env[63297]: DEBUG nova.compute.manager [req-b2e31e2b-5441-470f-ad9c-7121d90a7364 req-1cdade58-30bb-4c89-885b-fc003379fc5f service nova] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Refreshing instance network info cache due to event network-changed-6b15b0b7-1c3f-4da4-80bd-92d9d77975cd. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1277.887983] env[63297]: DEBUG oslo_concurrency.lockutils [req-b2e31e2b-5441-470f-ad9c-7121d90a7364 req-1cdade58-30bb-4c89-885b-fc003379fc5f service nova] Acquiring lock "refresh_cache-87fa97a7-a8a5-4184-b52a-b02ad5468127" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1277.888152] env[63297]: DEBUG oslo_concurrency.lockutils [req-b2e31e2b-5441-470f-ad9c-7121d90a7364 req-1cdade58-30bb-4c89-885b-fc003379fc5f service nova] Acquired lock "refresh_cache-87fa97a7-a8a5-4184-b52a-b02ad5468127" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.888299] env[63297]: DEBUG nova.network.neutron [req-b2e31e2b-5441-470f-ad9c-7121d90a7364 req-1cdade58-30bb-4c89-885b-fc003379fc5f service nova] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Refreshing network info cache for port 6b15b0b7-1c3f-4da4-80bd-92d9d77975cd {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1277.893597] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697095, 'name': ReconfigVM_Task, 'duration_secs': 1.225252} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.894074] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 92439795-6240-4103-940b-de6d87738570/92439795-6240-4103-940b-de6d87738570.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1277.894674] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd44f212-094e-4ff5-8c35-a0b0f3dcb0e0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.903200] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1277.903200] env[63297]: value = "task-1697099" [ 1277.903200] env[63297]: _type = "Task" [ 1277.903200] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.912311] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697099, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.208793] env[63297]: DEBUG nova.compute.utils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1278.210376] env[63297]: DEBUG nova.compute.manager [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1278.210376] env[63297]: DEBUG nova.network.neutron [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1278.261745] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697098, 'name': CreateVM_Task, 'duration_secs': 0.413746} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.266852] env[63297]: DEBUG nova.policy [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '853980bba4924061a44660dcf51b8d2f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a056927390f4ff2a0305bd7e8ad5f3c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1278.268426] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1278.269754] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.269924] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.270248] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1278.270828] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b13e63a-3d05-4901-8a07-82117bf6b000 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.278478] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Waiting for the task: (returnval){ [ 1278.278478] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520bafd6-ae44-3d39-7e01-64b7e44d74d4" [ 1278.278478] env[63297]: _type = "Task" [ 1278.278478] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.291806] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520bafd6-ae44-3d39-7e01-64b7e44d74d4, 'name': SearchDatastore_Task, 'duration_secs': 0.010399} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.292062] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.292313] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1278.292561] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.293071] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.293071] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1278.293166] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cffe27d-0182-4cd6-8252-5c90ffb39cd1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.303601] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1278.303601] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 
tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1278.304458] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fb96874-1dd3-4e15-8feb-93def67f74b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.315747] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Waiting for the task: (returnval){ [ 1278.315747] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522b077b-a16a-9db7-a994-47626489c1cb" [ 1278.315747] env[63297]: _type = "Task" [ 1278.315747] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.324659] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522b077b-a16a-9db7-a994-47626489c1cb, 'name': SearchDatastore_Task, 'duration_secs': 0.010182} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.327098] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7c3128a-afd8-4700-86ad-b0ead1dc9525 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.331618] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Waiting for the task: (returnval){ [ 1278.331618] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52335bfa-407b-5308-cad7-456e043f5aed" [ 1278.331618] env[63297]: _type = "Task" [ 1278.331618] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.344982] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52335bfa-407b-5308-cad7-456e043f5aed, 'name': SearchDatastore_Task, 'duration_secs': 0.009277} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.345414] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.345665] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 87fa97a7-a8a5-4184-b52a-b02ad5468127/87fa97a7-a8a5-4184-b52a-b02ad5468127.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1278.347140] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0d2a6e1-2d91-4662-a7ea-9fbd6f8de502 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.365167] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Waiting for the task: (returnval){ [ 1278.365167] env[63297]: value = "task-1697100" [ 1278.365167] env[63297]: _type = "Task" [ 1278.365167] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.382871] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697100, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.383909] env[63297]: DEBUG nova.network.neutron [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Successfully updated port: 9e58a5c1-8922-4504-a29b-1b53dd0ff360 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1278.416197] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697099, 'name': Rename_Task, 'duration_secs': 0.150959} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.416665] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1278.416972] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49bd94ca-6efb-4001-82bd-ba9c8750c568 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.428820] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1278.428820] env[63297]: value = "task-1697101" [ 1278.428820] env[63297]: _type = "Task" [ 1278.428820] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.439254] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697101, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.517950] env[63297]: DEBUG nova.compute.manager [req-b61654ff-5d4c-45fa-9585-d051dec651c4 req-1af48345-3d15-4604-b358-aba4bed8458c service nova] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Received event network-vif-plugged-9e58a5c1-8922-4504-a29b-1b53dd0ff360 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1278.518211] env[63297]: DEBUG oslo_concurrency.lockutils [req-b61654ff-5d4c-45fa-9585-d051dec651c4 req-1af48345-3d15-4604-b358-aba4bed8458c service nova] Acquiring lock "22a927ad-c2af-4814-b728-ec31b76a34d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.518435] env[63297]: DEBUG oslo_concurrency.lockutils [req-b61654ff-5d4c-45fa-9585-d051dec651c4 req-1af48345-3d15-4604-b358-aba4bed8458c service nova] Lock "22a927ad-c2af-4814-b728-ec31b76a34d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.518619] env[63297]: DEBUG oslo_concurrency.lockutils [req-b61654ff-5d4c-45fa-9585-d051dec651c4 req-1af48345-3d15-4604-b358-aba4bed8458c service nova] Lock "22a927ad-c2af-4814-b728-ec31b76a34d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.518796] env[63297]: DEBUG nova.compute.manager [req-b61654ff-5d4c-45fa-9585-d051dec651c4 req-1af48345-3d15-4604-b358-aba4bed8458c service nova] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] No waiting events found dispatching network-vif-plugged-9e58a5c1-8922-4504-a29b-1b53dd0ff360 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1278.518970] 
env[63297]: WARNING nova.compute.manager [req-b61654ff-5d4c-45fa-9585-d051dec651c4 req-1af48345-3d15-4604-b358-aba4bed8458c service nova] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Received unexpected event network-vif-plugged-9e58a5c1-8922-4504-a29b-1b53dd0ff360 for instance with vm_state building and task_state spawning. [ 1278.607210] env[63297]: DEBUG nova.network.neutron [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Successfully created port: 7afec2db-71a8-42c9-aabb-988b70a71ede {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1278.720171] env[63297]: DEBUG nova.compute.manager [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1278.734473] env[63297]: DEBUG nova.network.neutron [req-b2e31e2b-5441-470f-ad9c-7121d90a7364 req-1cdade58-30bb-4c89-885b-fc003379fc5f service nova] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Updated VIF entry in instance network info cache for port 6b15b0b7-1c3f-4da4-80bd-92d9d77975cd. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1278.734888] env[63297]: DEBUG nova.network.neutron [req-b2e31e2b-5441-470f-ad9c-7121d90a7364 req-1cdade58-30bb-4c89-885b-fc003379fc5f service nova] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Updating instance_info_cache with network_info: [{"id": "6b15b0b7-1c3f-4da4-80bd-92d9d77975cd", "address": "fa:16:3e:6f:7d:a0", "network": {"id": "66931031-1baf-4534-be81-290d082814ea", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-839785853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06d0bb56c7cc4beb9e53cc65fd6063b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b15b0b7-1c", "ovs_interfaceid": "6b15b0b7-1c3f-4da4-80bd-92d9d77975cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.843993] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbbd36ca-8324-4f07-bf29-1936680a9796 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.852696] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f81f02-4005-4ce8-b410-c63ec7dd6139 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.889949] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Acquiring lock "refresh_cache-22a927ad-c2af-4814-b728-ec31b76a34d4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.890114] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Acquired lock "refresh_cache-22a927ad-c2af-4814-b728-ec31b76a34d4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.890263] env[63297]: DEBUG nova.network.neutron [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1278.898816] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c1e97c-3200-47e6-9ed4-e12b65ad27dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.908202] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697100, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523906} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.910366] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 87fa97a7-a8a5-4184-b52a-b02ad5468127/87fa97a7-a8a5-4184-b52a-b02ad5468127.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1278.910589] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1278.910866] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3706ac0-d54f-4a14-a986-041bcad71bb6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.913687] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89d032b-3000-4201-b658-a9de8c1d3dfb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.930153] env[63297]: DEBUG nova.compute.provider_tree [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.932835] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Waiting for the task: (returnval){ [ 1278.932835] env[63297]: value = "task-1697102" [ 1278.932835] env[63297]: _type = "Task" [ 1278.932835] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.946294] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697102, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.950178] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697101, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.240781] env[63297]: DEBUG oslo_concurrency.lockutils [req-b2e31e2b-5441-470f-ad9c-7121d90a7364 req-1cdade58-30bb-4c89-885b-fc003379fc5f service nova] Releasing lock "refresh_cache-87fa97a7-a8a5-4184-b52a-b02ad5468127" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1279.362069] env[63297]: DEBUG nova.compute.manager [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1279.362959] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b1e5ef-6a6c-451a-b36c-6719267ab428 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.432990] env[63297]: DEBUG nova.scheduler.client.report [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1279.442215] env[63297]: DEBUG nova.network.neutron [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1279.455291] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697102, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07217} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.459018] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1279.459018] env[63297]: DEBUG oslo_vmware.api [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697101, 'name': PowerOnVM_Task, 'duration_secs': 0.969445} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.459954] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6027998-0840-4093-95f6-8eb4cd9c01d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.462350] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1279.466021] env[63297]: INFO nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Took 10.03 seconds to spawn the instance on the hypervisor. [ 1279.466021] env[63297]: DEBUG nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1279.466021] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c119339c-23d5-45b1-8d58-380e5f3787b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.489250] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 87fa97a7-a8a5-4184-b52a-b02ad5468127/87fa97a7-a8a5-4184-b52a-b02ad5468127.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1279.493586] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab417a21-aa00-4526-bf94-43920f1a3f60 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.521159] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Waiting for the task: (returnval){ [ 1279.521159] env[63297]: value = "task-1697103" [ 1279.521159] env[63297]: _type = "Task" [ 1279.521159] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.530970] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697103, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.637919] env[63297]: DEBUG nova.network.neutron [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Updating instance_info_cache with network_info: [{"id": "9e58a5c1-8922-4504-a29b-1b53dd0ff360", "address": "fa:16:3e:99:ff:54", "network": {"id": "9492ff54-c1e9-4a6f-963d-c74818c8440a", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2013269520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b519161d82f9457eaebe7ea8a533b454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e58a5c1-89", "ovs_interfaceid": "9e58a5c1-8922-4504-a29b-1b53dd0ff360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.731651] env[63297]: DEBUG nova.compute.manager [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1279.754306] env[63297]: DEBUG nova.virt.hardware [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1279.754586] env[63297]: DEBUG nova.virt.hardware [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1279.754746] env[63297]: DEBUG nova.virt.hardware [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1279.754924] env[63297]: DEBUG nova.virt.hardware [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1279.755079] env[63297]: DEBUG nova.virt.hardware [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1279.755225] env[63297]: DEBUG nova.virt.hardware [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1279.755429] env[63297]: DEBUG nova.virt.hardware [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1279.755582] env[63297]: DEBUG nova.virt.hardware [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 
tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1279.755743] env[63297]: DEBUG nova.virt.hardware [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1279.755935] env[63297]: DEBUG nova.virt.hardware [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1279.756138] env[63297]: DEBUG nova.virt.hardware [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1279.756979] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de27cdf0-7da1-4c12-a618-e94f5370c7dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.764950] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a4e029-064d-43e6-9acc-f2150de722f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.876883] env[63297]: INFO nova.compute.manager [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] instance snapshotting [ 1279.879814] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a09e5c0-e41b-44ff-97f0-bbc3eb7a9378 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.899279] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc6c68f-613d-4722-9010-cd388499c6ad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.942463] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.237s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.944681] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.009s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.947076] env[63297]: INFO nova.compute.claims [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1279.970632] env[63297]: INFO nova.scheduler.client.report [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Deleted allocations for instance 459d5a17-182b-4284-b464-57d342981031 [ 1280.020479] env[63297]: INFO nova.compute.manager [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Took 44.39 seconds to build instance. [ 1280.031299] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697103, 'name': ReconfigVM_Task, 'duration_secs': 0.25978} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.031411] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 87fa97a7-a8a5-4184-b52a-b02ad5468127/87fa97a7-a8a5-4184-b52a-b02ad5468127.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1280.032026] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-751fee24-9b9b-41e4-aad4-6412cd4f1276 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.038893] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Waiting for the task: (returnval){ [ 1280.038893] env[63297]: value = "task-1697104" [ 1280.038893] env[63297]: _type = "Task" [ 1280.038893] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.047709] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697104, 'name': Rename_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.140499] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Releasing lock "refresh_cache-22a927ad-c2af-4814-b728-ec31b76a34d4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.140912] env[63297]: DEBUG nova.compute.manager [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Instance network_info: |[{"id": "9e58a5c1-8922-4504-a29b-1b53dd0ff360", "address": "fa:16:3e:99:ff:54", "network": {"id": "9492ff54-c1e9-4a6f-963d-c74818c8440a", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2013269520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b519161d82f9457eaebe7ea8a533b454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e58a5c1-89", "ovs_interfaceid": "9e58a5c1-8922-4504-a29b-1b53dd0ff360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1280.141355] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:ff:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e58a5c1-8922-4504-a29b-1b53dd0ff360', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1280.149883] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Creating folder: Project (b519161d82f9457eaebe7ea8a533b454). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1280.150200] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-853dd8b4-8170-4498-9e1c-10d561dfa961 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.160494] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Created folder: Project (b519161d82f9457eaebe7ea8a533b454) in parent group-v353718. [ 1280.160687] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Creating folder: Instances. Parent ref: group-v353811. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1280.161180] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f434d15-ddc5-4359-9de4-09dacc37967b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.175518] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Created folder: Instances in parent group-v353811. [ 1280.175820] env[63297]: DEBUG oslo.service.loopingcall [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1280.175987] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1280.176204] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2280cebf-90a5-404e-a19e-b3003120a8f5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.195900] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1280.195900] env[63297]: value = "task-1697107" [ 1280.195900] env[63297]: _type = "Task" [ 1280.195900] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.210955] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697107, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.213374] env[63297]: DEBUG nova.compute.manager [req-c223738d-4d4d-455e-a60f-7fc10e883435 req-c8e19cfd-72cc-4d8f-98ed-258260a72d46 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Received event network-vif-plugged-7afec2db-71a8-42c9-aabb-988b70a71ede {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1280.213374] env[63297]: DEBUG oslo_concurrency.lockutils [req-c223738d-4d4d-455e-a60f-7fc10e883435 req-c8e19cfd-72cc-4d8f-98ed-258260a72d46 service nova] Acquiring lock "862302e5-ad7e-40f3-a4a3-8c4a8035e1cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.213374] env[63297]: DEBUG oslo_concurrency.lockutils [req-c223738d-4d4d-455e-a60f-7fc10e883435 req-c8e19cfd-72cc-4d8f-98ed-258260a72d46 service nova] Lock "862302e5-ad7e-40f3-a4a3-8c4a8035e1cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.213540] env[63297]: DEBUG oslo_concurrency.lockutils [req-c223738d-4d4d-455e-a60f-7fc10e883435 req-c8e19cfd-72cc-4d8f-98ed-258260a72d46 service nova] Lock "862302e5-ad7e-40f3-a4a3-8c4a8035e1cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.213696] env[63297]: DEBUG nova.compute.manager [req-c223738d-4d4d-455e-a60f-7fc10e883435 req-c8e19cfd-72cc-4d8f-98ed-258260a72d46 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] No waiting events found dispatching network-vif-plugged-7afec2db-71a8-42c9-aabb-988b70a71ede {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1280.213865] env[63297]: WARNING nova.compute.manager [req-c223738d-4d4d-455e-a60f-7fc10e883435 req-c8e19cfd-72cc-4d8f-98ed-258260a72d46 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Received unexpected event network-vif-plugged-7afec2db-71a8-42c9-aabb-988b70a71ede for instance with vm_state building and task_state spawning. 
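The lock messages running through the entries above ("Acquiring lock ... by ...", "acquired ... :: waited", ""released" ... :: held") all come from oslo.concurrency's named in-process locks, which serialize work such as the resource tracker's instance claims and the per-instance "refresh_cache-<uuid>" network-info updates. The following is a minimal illustrative sketch of that pattern, not code taken from this trace: the lock names mirror the ones logged here, but the function names and bodies are hypothetical placeholders.

from oslo_concurrency import lockutils

# Context-manager form: the body runs while the named lock is held, and
# lockutils logs similar "Acquiring lock" / "Acquired lock" / "Releasing
# lock" DEBUG lines (lockutils.py:310/313/331 in this trace).
def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # hypothetical placeholder: rebuild the instance's cached network info
        pass

# Decorator form: every caller of instance_claim() queues on the single
# "compute_resources" lock name, which is why the log reports long waits
# (e.g. "waited 30.009s") when several instance builds are in flight.
@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # hypothetical placeholder: reserve CPU/RAM/disk for the instance
    pass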
[ 1280.308202] env[63297]: DEBUG nova.network.neutron [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Successfully updated port: 7afec2db-71a8-42c9-aabb-988b70a71ede {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1280.410561] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1280.410908] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f3c80d38-b2e8-4c7b-941c-10a6d9ea79cb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.418087] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1280.418087] env[63297]: value = "task-1697108" [ 1280.418087] env[63297]: _type = "Task" [ 1280.418087] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.426060] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697108, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.477378] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2fd8458-2f5c-444e-a8b7-1b67d647d6f3 tempest-ServerExternalEventsTest-1375414536 tempest-ServerExternalEventsTest-1375414536-project-member] Lock "459d5a17-182b-4284-b464-57d342981031" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.706s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.525599] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2115d2fc-9909-4882-beb1-4a939630a6dd tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "92439795-6240-4103-940b-de6d87738570" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.020s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.551538] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697104, 'name': Rename_Task, 'duration_secs': 0.137357} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.551876] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1280.552209] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c615abcb-639f-46db-9425-2e445e978eaa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.561088] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Waiting for the task: (returnval){ [ 1280.561088] env[63297]: value = "task-1697109" [ 1280.561088] env[63297]: _type = "Task" [ 1280.561088] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.563285] env[63297]: DEBUG nova.compute.manager [req-8529ef95-e59a-4e7b-b301-a97dbdc38e63 req-d1402be9-c8f3-4a30-a4c6-a681b9f7f648 service nova] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Received event network-changed-9e58a5c1-8922-4504-a29b-1b53dd0ff360 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1280.563565] env[63297]: DEBUG nova.compute.manager [req-8529ef95-e59a-4e7b-b301-a97dbdc38e63 req-d1402be9-c8f3-4a30-a4c6-a681b9f7f648 service nova] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Refreshing instance network info cache due to event network-changed-9e58a5c1-8922-4504-a29b-1b53dd0ff360. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1280.563893] env[63297]: DEBUG oslo_concurrency.lockutils [req-8529ef95-e59a-4e7b-b301-a97dbdc38e63 req-d1402be9-c8f3-4a30-a4c6-a681b9f7f648 service nova] Acquiring lock "refresh_cache-22a927ad-c2af-4814-b728-ec31b76a34d4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.564122] env[63297]: DEBUG oslo_concurrency.lockutils [req-8529ef95-e59a-4e7b-b301-a97dbdc38e63 req-d1402be9-c8f3-4a30-a4c6-a681b9f7f648 service nova] Acquired lock "refresh_cache-22a927ad-c2af-4814-b728-ec31b76a34d4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.564391] env[63297]: DEBUG nova.network.neutron [req-8529ef95-e59a-4e7b-b301-a97dbdc38e63 req-d1402be9-c8f3-4a30-a4c6-a681b9f7f648 service nova] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Refreshing network info cache for port 9e58a5c1-8922-4504-a29b-1b53dd0ff360 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1280.575890] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697109, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.708076] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697107, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.811295] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Acquiring lock "refresh_cache-862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.811461] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Acquired lock "refresh_cache-862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.811675] env[63297]: DEBUG nova.network.neutron [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1280.927996] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697108, 'name': CreateSnapshot_Task, 'duration_secs': 0.48078} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.928265] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1280.929154] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78142b35-0e4b-4a56-bd96-404c594247a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.028296] env[63297]: DEBUG nova.compute.manager [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1281.075183] env[63297]: DEBUG oslo_vmware.api [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697109, 'name': PowerOnVM_Task, 'duration_secs': 0.443233} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.078657] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1281.078657] env[63297]: INFO nova.compute.manager [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Took 6.80 seconds to spawn the instance on the hypervisor. [ 1281.078800] env[63297]: DEBUG nova.compute.manager [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1281.081867] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e2fd66-1e8f-41b7-aade-daf74eac7a1b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.207438] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697107, 'name': CreateVM_Task, 'duration_secs': 0.648678} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.207541] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1281.208197] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.208357] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.208684] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1281.211323] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc664d62-d80d-4998-bf5b-6fb3e5aba57b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.216900] env[63297]: DEBUG oslo_vmware.api [None 
req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Waiting for the task: (returnval){ [ 1281.216900] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cb406f-30f5-692e-07fc-bf0a64f0d7e3" [ 1281.216900] env[63297]: _type = "Task" [ 1281.216900] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.228467] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cb406f-30f5-692e-07fc-bf0a64f0d7e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.372276] env[63297]: DEBUG nova.network.neutron [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1281.387969] env[63297]: DEBUG nova.network.neutron [req-8529ef95-e59a-4e7b-b301-a97dbdc38e63 req-d1402be9-c8f3-4a30-a4c6-a681b9f7f648 service nova] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Updated VIF entry in instance network info cache for port 9e58a5c1-8922-4504-a29b-1b53dd0ff360. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1281.388256] env[63297]: DEBUG nova.network.neutron [req-8529ef95-e59a-4e7b-b301-a97dbdc38e63 req-d1402be9-c8f3-4a30-a4c6-a681b9f7f648 service nova] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Updating instance_info_cache with network_info: [{"id": "9e58a5c1-8922-4504-a29b-1b53dd0ff360", "address": "fa:16:3e:99:ff:54", "network": {"id": "9492ff54-c1e9-4a6f-963d-c74818c8440a", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2013269520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b519161d82f9457eaebe7ea8a533b454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e58a5c1-89", "ovs_interfaceid": "9e58a5c1-8922-4504-a29b-1b53dd0ff360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.446886] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Creating linked-clone 
VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1281.449718] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-abef6daa-3959-4371-a5cd-613006191b76 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.458333] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1281.458333] env[63297]: value = "task-1697110" [ 1281.458333] env[63297]: _type = "Task" [ 1281.458333] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.470858] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697110, 'name': CloneVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.551562] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.565922] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3978cec9-6c1d-473f-8e0c-0578d428b3ad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.574775] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7ae32d-cbb4-455b-a6e2-3b968c9ea5bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.615888] env[63297]: DEBUG nova.network.neutron [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Updating instance_info_cache with network_info: [{"id": "7afec2db-71a8-42c9-aabb-988b70a71ede", "address": "fa:16:3e:49:e6:3e", "network": {"id": "c5f8653d-b207-4631-8f20-252c17df44b3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2063892400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a056927390f4ff2a0305bd7e8ad5f3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afec2db-71", "ovs_interfaceid": "7afec2db-71a8-42c9-aabb-988b70a71ede", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.617591] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1faf8b21-6ed2-4ebb-ad1e-faf083e3831b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.622883] env[63297]: INFO nova.compute.manager [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Took 37.62 seconds to build instance. [ 1281.627377] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fef974e-c509-4a20-bad8-9eb20fc8b172 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.644100] env[63297]: DEBUG oslo_concurrency.lockutils [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.644370] env[63297]: DEBUG oslo_concurrency.lockutils [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.644648] env[63297]: DEBUG oslo_concurrency.lockutils [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.644899] env[63297]: DEBUG oslo_concurrency.lockutils [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.645556] env[63297]: DEBUG oslo_concurrency.lockutils [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.648058] env[63297]: DEBUG nova.compute.provider_tree [None 
req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1281.649984] env[63297]: INFO nova.compute.manager [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Terminating instance [ 1281.656126] env[63297]: DEBUG nova.compute.manager [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1281.656315] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1281.657220] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679da05e-b197-405f-809f-b627e7c989d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.665697] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1281.665949] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87beac39-9b2c-4535-ac6c-358cc403c8a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.673896] env[63297]: DEBUG oslo_vmware.api [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1281.673896] env[63297]: value = "task-1697111" [ 1281.673896] env[63297]: _type = "Task" [ 1281.673896] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.684426] env[63297]: DEBUG oslo_vmware.api [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697111, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.730024] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cb406f-30f5-692e-07fc-bf0a64f0d7e3, 'name': SearchDatastore_Task, 'duration_secs': 0.015923} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.730024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.730024] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1281.730024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.730024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.730024] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1281.730024] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-125a5044-2156-40c8-a3ca-760143ef4100 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.740699] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1281.740961] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1281.742184] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6112d505-c229-49a0-896a-8148a5823247 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.752235] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Waiting for the task: (returnval){ [ 1281.752235] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524429df-757c-c068-045e-b047d02ad940" [ 1281.752235] env[63297]: _type = "Task" [ 1281.752235] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.762432] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524429df-757c-c068-045e-b047d02ad940, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.892726] env[63297]: DEBUG oslo_concurrency.lockutils [req-8529ef95-e59a-4e7b-b301-a97dbdc38e63 req-d1402be9-c8f3-4a30-a4c6-a681b9f7f648 service nova] Releasing lock "refresh_cache-22a927ad-c2af-4814-b728-ec31b76a34d4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.968876] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697110, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.122201] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Releasing lock "refresh_cache-862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1282.124634] env[63297]: DEBUG nova.compute.manager [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Instance network_info: |[{"id": "7afec2db-71a8-42c9-aabb-988b70a71ede", "address": "fa:16:3e:49:e6:3e", "network": {"id": "c5f8653d-b207-4631-8f20-252c17df44b3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2063892400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a056927390f4ff2a0305bd7e8ad5f3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afec2db-71", "ovs_interfaceid": "7afec2db-71a8-42c9-aabb-988b70a71ede", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1282.124634] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:e6:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7afec2db-71a8-42c9-aabb-988b70a71ede', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1282.132795] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Creating folder: Project (1a056927390f4ff2a0305bd7e8ad5f3c). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1282.133316] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dc7df150-b9f3-48c9-8e21-5d594720ec77 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Lock "87fa97a7-a8a5-4184-b52a-b02ad5468127" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.972s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.133543] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-101e9e89-d76e-4b27-8f91-fbedcfbc6ec0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.146142] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Created folder: Project (1a056927390f4ff2a0305bd7e8ad5f3c) in parent group-v353718. [ 1282.146842] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Creating folder: Instances. Parent ref: group-v353816. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1282.147091] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-052b47d3-a56d-41ac-8919-258fd97fe2aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.152589] env[63297]: DEBUG nova.scheduler.client.report [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1282.157288] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Created folder: Instances in parent group-v353816. [ 1282.157536] env[63297]: DEBUG oslo.service.loopingcall [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1282.157921] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1282.158186] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d00391d9-45c3-4a55-b0a1-a12616498bec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.179094] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1282.179094] env[63297]: value = "task-1697114" [ 1282.179094] env[63297]: _type = "Task" [ 1282.179094] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.185240] env[63297]: DEBUG oslo_vmware.api [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697111, 'name': PowerOffVM_Task, 'duration_secs': 0.284669} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.185875] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1282.186019] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1282.186260] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-360b33af-b287-42cb-8ee8-3ab4fa93be09 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.192617] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697114, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.259966] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524429df-757c-c068-045e-b047d02ad940, 'name': SearchDatastore_Task, 'duration_secs': 0.012871} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.260786] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4f96056-66b6-41e9-9405-ddcb6c249f8e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.266482] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Waiting for the task: (returnval){ [ 1282.266482] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5224d51b-fac5-f8da-c8ab-b68a95c5e42a" [ 1282.266482] env[63297]: _type = "Task" [ 1282.266482] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.274918] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5224d51b-fac5-f8da-c8ab-b68a95c5e42a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.300616] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1282.300828] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1282.301017] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Deleting the datastore file [datastore1] 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1282.301293] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd29cb29-a754-408a-8c0e-862c57dab5b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.307683] env[63297]: DEBUG oslo_vmware.api [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1282.307683] env[63297]: value = "task-1697116" [ 1282.307683] env[63297]: _type = "Task" [ 1282.307683] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.316556] env[63297]: DEBUG oslo_vmware.api [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697116, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.323687] env[63297]: DEBUG nova.compute.manager [req-5ecf79be-6526-4001-89cd-3bb0dfa43d47 req-94d69d91-2e90-4d73-a14f-ab069ba7d298 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Received event network-changed-7afec2db-71a8-42c9-aabb-988b70a71ede {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1282.323900] env[63297]: DEBUG nova.compute.manager [req-5ecf79be-6526-4001-89cd-3bb0dfa43d47 req-94d69d91-2e90-4d73-a14f-ab069ba7d298 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Refreshing instance network info cache due to event network-changed-7afec2db-71a8-42c9-aabb-988b70a71ede. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1282.324178] env[63297]: DEBUG oslo_concurrency.lockutils [req-5ecf79be-6526-4001-89cd-3bb0dfa43d47 req-94d69d91-2e90-4d73-a14f-ab069ba7d298 service nova] Acquiring lock "refresh_cache-862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.324305] env[63297]: DEBUG oslo_concurrency.lockutils [req-5ecf79be-6526-4001-89cd-3bb0dfa43d47 req-94d69d91-2e90-4d73-a14f-ab069ba7d298 service nova] Acquired lock "refresh_cache-862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.324466] env[63297]: DEBUG nova.network.neutron [req-5ecf79be-6526-4001-89cd-3bb0dfa43d47 req-94d69d91-2e90-4d73-a14f-ab069ba7d298 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Refreshing network info cache for port 7afec2db-71a8-42c9-aabb-988b70a71ede {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1282.470811] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697110, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.637869] env[63297]: DEBUG nova.compute.manager [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1282.659937] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.660529] env[63297]: DEBUG nova.compute.manager [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1282.667441] env[63297]: DEBUG oslo_concurrency.lockutils [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.671s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.667715] env[63297]: DEBUG nova.objects.instance [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lazy-loading 'resources' on Instance uuid f3a579de-1f29-4b67-8dc8-07ea37267001 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1282.694037] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697114, 'name': CreateVM_Task, 'duration_secs': 0.400682} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.694037] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1282.695028] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.695136] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.697357] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1282.697357] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0b94e8d-3d7a-46e6-ac46-77fa84ae5e6f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.706531] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Waiting for the task: (returnval){ [ 1282.706531] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52487713-707c-123c-b5ce-046af40cf556" [ 1282.706531] env[63297]: _type = "Task" [ 1282.706531] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.718477] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52487713-707c-123c-b5ce-046af40cf556, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.777893] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5224d51b-fac5-f8da-c8ab-b68a95c5e42a, 'name': SearchDatastore_Task, 'duration_secs': 0.011407} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.778206] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1282.778461] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 22a927ad-c2af-4814-b728-ec31b76a34d4/22a927ad-c2af-4814-b728-ec31b76a34d4.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1282.778720] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1102a24d-bb59-482e-a516-b25409f095f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.785258] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Waiting for the task: (returnval){ [ 1282.785258] env[63297]: value = "task-1697117" [ 1282.785258] env[63297]: _type = "Task" [ 1282.785258] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.793799] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697117, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.817220] env[63297]: DEBUG oslo_vmware.api [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697116, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19545} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.817462] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1282.817641] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1282.817812] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1282.817978] env[63297]: INFO nova.compute.manager [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1282.821072] env[63297]: DEBUG oslo.service.loopingcall [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1282.821072] env[63297]: DEBUG nova.compute.manager [-] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1282.821072] env[63297]: DEBUG nova.network.neutron [-] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1282.973042] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697110, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.045982] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Acquiring lock "87fa97a7-a8a5-4184-b52a-b02ad5468127" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.046920] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Lock "87fa97a7-a8a5-4184-b52a-b02ad5468127" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.047317] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Acquiring lock "87fa97a7-a8a5-4184-b52a-b02ad5468127-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.047616] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Lock "87fa97a7-a8a5-4184-b52a-b02ad5468127-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.047912] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Lock "87fa97a7-a8a5-4184-b52a-b02ad5468127-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.050361] env[63297]: INFO nova.compute.manager [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Terminating instance [ 1283.052430] env[63297]: DEBUG nova.compute.manager [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1283.052908] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1283.054094] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc05bb0-e650-49b1-b0aa-ef6c42bd56d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.064873] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1283.065193] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-852d0284-78f2-4910-8ae4-6ad7de8f1713 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.074020] env[63297]: DEBUG oslo_vmware.api [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Waiting for the task: (returnval){ [ 1283.074020] env[63297]: value = "task-1697118" [ 1283.074020] env[63297]: _type = "Task" [ 1283.074020] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.080568] env[63297]: DEBUG oslo_vmware.api [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697118, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.091821] env[63297]: DEBUG nova.network.neutron [req-5ecf79be-6526-4001-89cd-3bb0dfa43d47 req-94d69d91-2e90-4d73-a14f-ab069ba7d298 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Updated VIF entry in instance network info cache for port 7afec2db-71a8-42c9-aabb-988b70a71ede. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1283.091821] env[63297]: DEBUG nova.network.neutron [req-5ecf79be-6526-4001-89cd-3bb0dfa43d47 req-94d69d91-2e90-4d73-a14f-ab069ba7d298 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Updating instance_info_cache with network_info: [{"id": "7afec2db-71a8-42c9-aabb-988b70a71ede", "address": "fa:16:3e:49:e6:3e", "network": {"id": "c5f8653d-b207-4631-8f20-252c17df44b3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2063892400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a056927390f4ff2a0305bd7e8ad5f3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afec2db-71", "ovs_interfaceid": "7afec2db-71a8-42c9-aabb-988b70a71ede", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.168825] env[63297]: DEBUG nova.compute.utils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1283.174157] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.174442] env[63297]: DEBUG nova.compute.manager [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1283.174611] env[63297]: DEBUG nova.network.neutron [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1283.219158] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52487713-707c-123c-b5ce-046af40cf556, 'name': SearchDatastore_Task, 'duration_secs': 0.016083} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.222454] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1283.223038] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1283.223328] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1283.223520] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.223745] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1283.224756] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6fe77636-f469-4ca4-a23c-a198ffc9346e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.240782] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1283.240998] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1283.241984] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5da352c-ab7e-4244-b5f6-4ca3d512b6d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.248595] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Waiting for the task: (returnval){ [ 1283.248595] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d469a-ba18-c446-560b-2cc604b9e01e" [ 1283.248595] env[63297]: _type = "Task" [ 1283.248595] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.258446] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d469a-ba18-c446-560b-2cc604b9e01e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.279197] env[63297]: DEBUG nova.policy [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080894c6e46d4686b5bfd67e5eddbe2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20f891cd9bb546b9bfe8095234165327', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1283.298419] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697117, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.321803] env[63297]: DEBUG nova.compute.manager [req-7f5744ec-bb6e-44d2-b712-5430503ecec6 req-3916384a-65d4-4d3e-941b-af337cbc3011 service nova] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Received event network-vif-deleted-a0f734f5-ff87-41a5-ac99-57124d62dcda {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1283.322143] env[63297]: INFO nova.compute.manager [req-7f5744ec-bb6e-44d2-b712-5430503ecec6 req-3916384a-65d4-4d3e-941b-af337cbc3011 service nova] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Neutron deleted interface a0f734f5-ff87-41a5-ac99-57124d62dcda; detaching it from the instance and deleting it from the info cache [ 1283.322228] env[63297]: DEBUG nova.network.neutron [req-7f5744ec-bb6e-44d2-b712-5430503ecec6 req-3916384a-65d4-4d3e-941b-af337cbc3011 service nova] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.479326] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697110, 'name': CloneVM_Task, 'duration_secs': 1.53928} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.482829] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Created linked-clone VM from snapshot [ 1283.484642] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32dbd30e-d87d-4285-9e65-330d2f86a089 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.494241] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Uploading image 861a16f6-8868-469a-b39d-c225f8b1c8b3 {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1283.532030] env[63297]: DEBUG oslo_vmware.rw_handles [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1283.532030] env[63297]: value = "vm-353815" [ 1283.532030] env[63297]: _type = "VirtualMachine" [ 1283.532030] env[63297]: }. 
{{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1283.532348] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e2c16896-d86a-4624-94a0-f4fb48269df8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.542396] env[63297]: DEBUG oslo_vmware.rw_handles [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lease: (returnval){ [ 1283.542396] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c97be7-5e7c-cbf0-7edf-8397efaa989e" [ 1283.542396] env[63297]: _type = "HttpNfcLease" [ 1283.542396] env[63297]: } obtained for exporting VM: (result){ [ 1283.542396] env[63297]: value = "vm-353815" [ 1283.542396] env[63297]: _type = "VirtualMachine" [ 1283.542396] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1283.542665] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the lease: (returnval){ [ 1283.542665] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c97be7-5e7c-cbf0-7edf-8397efaa989e" [ 1283.542665] env[63297]: _type = "HttpNfcLease" [ 1283.542665] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1283.554969] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1283.554969] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c97be7-5e7c-cbf0-7edf-8397efaa989e" [ 1283.554969] env[63297]: _type = "HttpNfcLease" [ 1283.554969] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1283.592381] env[63297]: DEBUG oslo_vmware.api [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697118, 'name': PowerOffVM_Task, 'duration_secs': 0.279122} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.593199] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1283.594401] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1283.594401] env[63297]: DEBUG oslo_concurrency.lockutils [req-5ecf79be-6526-4001-89cd-3bb0dfa43d47 req-94d69d91-2e90-4d73-a14f-ab069ba7d298 service nova] Releasing lock "refresh_cache-862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1283.594401] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f1a1bbb-8689-4762-908d-b43575d2c9d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.677697] env[63297]: DEBUG nova.compute.manager [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1283.700931] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1283.701182] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1283.701363] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Deleting the datastore file [datastore1] 87fa97a7-a8a5-4184-b52a-b02ad5468127 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1283.701677] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b1c6a39-a571-4d00-8ae9-4310d7f392d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.711696] env[63297]: DEBUG oslo_vmware.api [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Waiting for the 
task: (returnval){ [ 1283.711696] env[63297]: value = "task-1697121" [ 1283.711696] env[63297]: _type = "Task" [ 1283.711696] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.721421] env[63297]: DEBUG oslo_vmware.api [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697121, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.761088] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d469a-ba18-c446-560b-2cc604b9e01e, 'name': SearchDatastore_Task, 'duration_secs': 0.06424} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.761513] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baa54dd7-0e79-4b6c-878f-6548331d59da {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.767917] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Waiting for the task: (returnval){ [ 1283.767917] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5280d727-1288-4ce3-d8ce-87198eaf689a" [ 1283.767917] env[63297]: _type = "Task" [ 1283.767917] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.779074] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5280d727-1288-4ce3-d8ce-87198eaf689a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.786341] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf414352-86ad-4d15-98b0-dd97dc66183b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.798413] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697117, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.770057} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.800786] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 22a927ad-c2af-4814-b728-ec31b76a34d4/22a927ad-c2af-4814-b728-ec31b76a34d4.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1283.801027] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1283.801298] env[63297]: DEBUG nova.network.neutron [-] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.802459] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92d63306-d9cb-4086-aaeb-40ba53e83c09 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.805373] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6ae699-4a1b-4685-9a58-2a3c1624328b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.849847] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b9df0bd-f441-4d1a-a500-6d11349dd44b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.853301] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d0884e-15c0-46fe-9cfd-acb48f6f8491 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.857920] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Waiting for the task: (returnval){ [ 1283.857920] env[63297]: value = "task-1697122" [ 1283.857920] env[63297]: _type = "Task" [ 1283.857920] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.874073] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68621c00-6f56-4eb8-a266-814eaf7683ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.880064] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697122, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.884845] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9108720a-e44e-4d41-95eb-f60e371823e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.906483] env[63297]: DEBUG nova.compute.provider_tree [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1283.924861] env[63297]: DEBUG nova.compute.manager [req-7f5744ec-bb6e-44d2-b712-5430503ecec6 req-3916384a-65d4-4d3e-941b-af337cbc3011 service nova] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Detach interface failed, port_id=a0f734f5-ff87-41a5-ac99-57124d62dcda, reason: Instance 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1284.052223] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1284.052223] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c97be7-5e7c-cbf0-7edf-8397efaa989e" [ 1284.052223] env[63297]: _type = "HttpNfcLease" [ 1284.052223] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1284.052223] env[63297]: DEBUG oslo_vmware.rw_handles [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1284.052223] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c97be7-5e7c-cbf0-7edf-8397efaa989e" [ 1284.052223] env[63297]: _type = "HttpNfcLease" [ 1284.052223] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1284.052748] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7e97e3-26ea-4e12-b248-da29e73c93a9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.061703] env[63297]: DEBUG oslo_vmware.rw_handles [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5231c4d9-21e6-e464-94ae-e5a23cc3c8a1/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1284.061896] env[63297]: DEBUG oslo_vmware.rw_handles [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5231c4d9-21e6-e464-94ae-e5a23cc3c8a1/disk-0.vmdk for reading. 
{{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1284.120953] env[63297]: DEBUG nova.network.neutron [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Successfully created port: e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1284.153722] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-28aebecb-bbef-4e89-b2cd-53cdeac9e6ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.222909] env[63297]: DEBUG oslo_vmware.api [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697121, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.279296] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5280d727-1288-4ce3-d8ce-87198eaf689a, 'name': SearchDatastore_Task, 'duration_secs': 0.034151} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.279571] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1284.279829] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf/862302e5-ad7e-40f3-a4a3-8c4a8035e1cf.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1284.280106] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67b741b1-6d83-413f-952b-6473a88c566c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.290274] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Waiting for the task: (returnval){ [ 1284.290274] env[63297]: value = "task-1697123" [ 1284.290274] env[63297]: _type = "Task" [ 1284.290274] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.300576] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697123, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.310269] env[63297]: INFO nova.compute.manager [-] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Took 1.49 seconds to deallocate network for instance. [ 1284.369523] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697122, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070838} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.369523] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1284.369824] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c914445e-5c3d-47e9-907f-964fa476e4fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.394423] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 22a927ad-c2af-4814-b728-ec31b76a34d4/22a927ad-c2af-4814-b728-ec31b76a34d4.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1284.394860] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42695dc4-5ea9-4b14-893c-2bdc65b9b583 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.410146] env[63297]: DEBUG nova.scheduler.client.report [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1284.424480] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Waiting for the task: 
(returnval){ [ 1284.424480] env[63297]: value = "task-1697124" [ 1284.424480] env[63297]: _type = "Task" [ 1284.424480] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.688875] env[63297]: DEBUG nova.compute.manager [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1284.723120] env[63297]: DEBUG nova.virt.hardware [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1284.723705] env[63297]: DEBUG nova.virt.hardware [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1284.724157] env[63297]: DEBUG nova.virt.hardware [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1284.724792] env[63297]: DEBUG nova.virt.hardware [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1284.725176] env[63297]: DEBUG nova.virt.hardware [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1284.725493] env[63297]: DEBUG nova.virt.hardware [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1284.725916] env[63297]: DEBUG nova.virt.hardware [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca 
tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1284.726246] env[63297]: DEBUG nova.virt.hardware [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1284.726506] env[63297]: DEBUG nova.virt.hardware [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1284.726903] env[63297]: DEBUG nova.virt.hardware [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1284.727842] env[63297]: DEBUG nova.virt.hardware [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1284.729107] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470638ac-5851-4659-bbed-a5c7039c8b15 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.739148] env[63297]: DEBUG oslo_vmware.api [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Task: {'id': task-1697121, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.728673} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.740098] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1284.740420] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1284.740671] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1284.740847] env[63297]: INFO nova.compute.manager [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Took 1.69 seconds to destroy the instance on the hypervisor. [ 1284.741611] env[63297]: DEBUG oslo.service.loopingcall [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1284.741998] env[63297]: DEBUG nova.compute.manager [-] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1284.742143] env[63297]: DEBUG nova.network.neutron [-] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1284.750642] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d721acf-202c-4529-a281-17a7a5474d1e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.802286] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697123, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.817117] env[63297]: DEBUG oslo_concurrency.lockutils [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1284.916382] env[63297]: DEBUG oslo_concurrency.lockutils [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.249s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.919439] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.672s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1284.921467] env[63297]: INFO nova.compute.claims [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1284.937821] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697124, 'name': ReconfigVM_Task, 'duration_secs': 0.36284} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.938191] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 22a927ad-c2af-4814-b728-ec31b76a34d4/22a927ad-c2af-4814-b728-ec31b76a34d4.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1284.938947] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d0aaa5f-29a9-4fd3-b7b9-6bb56884b972 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.950387] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Waiting for the task: (returnval){ [ 1284.950387] env[63297]: value = "task-1697125" [ 1284.950387] env[63297]: _type = "Task" [ 1284.950387] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.961331] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697125, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.963638] env[63297]: INFO nova.scheduler.client.report [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Deleted allocations for instance f3a579de-1f29-4b67-8dc8-07ea37267001 [ 1285.310956] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697123, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607402} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.311536] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf/862302e5-ad7e-40f3-a4a3-8c4a8035e1cf.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1285.311939] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1285.312301] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e8f14f8-46de-4cbd-a171-18bc7007906a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.323920] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Waiting for the task: (returnval){ [ 1285.323920] env[63297]: value = "task-1697126" [ 1285.323920] env[63297]: _type = "Task" [ 1285.323920] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.343571] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697126, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.465025] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697125, 'name': Rename_Task, 'duration_secs': 0.165888} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.465025] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1285.465025] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a86df5df-cae9-4093-9fb5-3b1e825f6290 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.474732] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Waiting for the task: (returnval){ [ 1285.474732] env[63297]: value = "task-1697127" [ 1285.474732] env[63297]: _type = "Task" [ 1285.474732] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.475293] env[63297]: DEBUG oslo_concurrency.lockutils [None req-619afa3c-6788-41bd-8b8e-ca9f0e044194 tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "f3a579de-1f29-4b67-8dc8-07ea37267001" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.981s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1285.494953] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697127, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.515530] env[63297]: DEBUG nova.compute.manager [req-9e464655-9ee0-4fc4-8908-4bcd9034424f req-7cb92ffe-5473-4778-ab07-944fd93481bf service nova] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Received event network-vif-deleted-6b15b0b7-1c3f-4da4-80bd-92d9d77975cd {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1285.515791] env[63297]: INFO nova.compute.manager [req-9e464655-9ee0-4fc4-8908-4bcd9034424f req-7cb92ffe-5473-4778-ab07-944fd93481bf service nova] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Neutron deleted interface 6b15b0b7-1c3f-4da4-80bd-92d9d77975cd; detaching it from the instance and deleting it from the info cache [ 1285.515995] env[63297]: DEBUG nova.network.neutron [req-9e464655-9ee0-4fc4-8908-4bcd9034424f req-7cb92ffe-5473-4778-ab07-944fd93481bf service nova] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.578390] env[63297]: DEBUG nova.network.neutron [-] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.837018] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697126, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071388} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.837532] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1285.841348] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79485bcb-5a6f-4336-baa7-3ba9ab86b431 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.872603] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf/862302e5-ad7e-40f3-a4a3-8c4a8035e1cf.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1285.872961] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23b4e35c-6eac-47cf-b20f-ed31d5c47a46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.906869] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] 
Waiting for the task: (returnval){ [ 1285.906869] env[63297]: value = "task-1697128" [ 1285.906869] env[63297]: _type = "Task" [ 1285.906869] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.989517] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697127, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.020017] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2142174e-9156-47ee-9d29-21387774c8d1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.032544] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcaa0558-6d9f-4850-9b53-7f1223e6e300 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.050046] env[63297]: DEBUG nova.compute.manager [req-9d4eb91b-9550-4548-bd16-fad1c8dae6cb req-bee742f9-8ae1-499a-b5f6-9ff798f126bb service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Received event network-vif-plugged-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1286.050358] env[63297]: DEBUG oslo_concurrency.lockutils [req-9d4eb91b-9550-4548-bd16-fad1c8dae6cb req-bee742f9-8ae1-499a-b5f6-9ff798f126bb service nova] Acquiring lock "b65e8c04-df55-491e-861c-8aa6def8c9be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.050616] env[63297]: DEBUG oslo_concurrency.lockutils [req-9d4eb91b-9550-4548-bd16-fad1c8dae6cb req-bee742f9-8ae1-499a-b5f6-9ff798f126bb service nova] Lock "b65e8c04-df55-491e-861c-8aa6def8c9be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.050880] env[63297]: DEBUG oslo_concurrency.lockutils [req-9d4eb91b-9550-4548-bd16-fad1c8dae6cb req-bee742f9-8ae1-499a-b5f6-9ff798f126bb service nova] Lock "b65e8c04-df55-491e-861c-8aa6def8c9be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1286.051038] env[63297]: DEBUG nova.compute.manager [req-9d4eb91b-9550-4548-bd16-fad1c8dae6cb req-bee742f9-8ae1-499a-b5f6-9ff798f126bb service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] No waiting events found dispatching network-vif-plugged-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1286.051501] env[63297]: WARNING nova.compute.manager [req-9d4eb91b-9550-4548-bd16-fad1c8dae6cb req-bee742f9-8ae1-499a-b5f6-9ff798f126bb service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Received unexpected event network-vif-plugged-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 for instance with vm_state building and task_state spawning. 
[ 1286.082979] env[63297]: INFO nova.compute.manager [-] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Took 1.34 seconds to deallocate network for instance. [ 1286.083274] env[63297]: DEBUG nova.compute.manager [req-9e464655-9ee0-4fc4-8908-4bcd9034424f req-7cb92ffe-5473-4778-ab07-944fd93481bf service nova] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Detach interface failed, port_id=6b15b0b7-1c3f-4da4-80bd-92d9d77975cd, reason: Instance 87fa97a7-a8a5-4184-b52a-b02ad5468127 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1286.206134] env[63297]: DEBUG nova.network.neutron [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Successfully updated port: e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1286.421128] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.491570] env[63297]: DEBUG oslo_vmware.api [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697127, 'name': PowerOnVM_Task, 'duration_secs': 0.667941} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.491570] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1286.494246] env[63297]: INFO nova.compute.manager [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Took 9.41 seconds to spawn the instance on the hypervisor. 
[ 1286.495688] env[63297]: DEBUG nova.compute.manager [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1286.496211] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d560f1ef-1ae9-4f7f-96d0-1a7bb676b6cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.582906] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338e1e3d-af6d-42b5-a7a9-f3def809828f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.593545] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.595471] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64782342-bb67-4a78-9a87-481e55399420 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.629569] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba711fc-16b3-4eaf-876e-9c1f0ae64120 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.640160] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3461ca45-d262-4e71-955a-25237da5f756 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.659970] env[63297]: DEBUG nova.compute.provider_tree [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1286.712732] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1286.712890] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquired lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.713115] env[63297]: DEBUG nova.network.neutron [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] 
[instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1286.776027] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "81920a24-f406-4923-98b7-cc0f3d0ccc8b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.776318] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "81920a24-f406-4923-98b7-cc0f3d0ccc8b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.776534] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "81920a24-f406-4923-98b7-cc0f3d0ccc8b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.776713] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "81920a24-f406-4923-98b7-cc0f3d0ccc8b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.776893] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "81920a24-f406-4923-98b7-cc0f3d0ccc8b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1286.779231] env[63297]: INFO nova.compute.manager [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Terminating instance [ 1286.781098] env[63297]: DEBUG nova.compute.manager [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1286.781388] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1286.782398] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a47b66a0-12b2-4ea5-abdd-185138e7c07a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.796590] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1286.796590] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ac87597-8a3c-4a9c-a431-7071624b73d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.805751] env[63297]: DEBUG oslo_vmware.api [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1286.805751] env[63297]: value = "task-1697129" [ 1286.805751] env[63297]: _type = "Task" [ 1286.805751] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.820067] env[63297]: DEBUG oslo_vmware.api [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697129, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.924476] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697128, 'name': ReconfigVM_Task, 'duration_secs': 0.714232} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.924768] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf/862302e5-ad7e-40f3-a4a3-8c4a8035e1cf.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1286.926016] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9dda971d-f653-4cb5-b05f-a712432309c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.934749] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Waiting for the task: (returnval){ [ 1286.934749] env[63297]: value = "task-1697130" [ 1286.934749] env[63297]: _type = "Task" [ 1286.934749] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.946177] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697130, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.021791] env[63297]: INFO nova.compute.manager [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Took 42.53 seconds to build instance. [ 1287.161677] env[63297]: DEBUG nova.scheduler.client.report [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1287.262018] env[63297]: DEBUG nova.network.neutron [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1287.317583] env[63297]: DEBUG oslo_vmware.api [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697129, 'name': PowerOffVM_Task, 'duration_secs': 0.304648} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.317893] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1287.318193] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1287.318292] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-803e71c9-8818-4e7a-9b35-ef916fc75b86 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.403743] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1287.403743] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1287.403743] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Deleting the datastore file [datastore1] 81920a24-f406-4923-98b7-cc0f3d0ccc8b {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1287.404009] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8dd0f827-29ec-49e1-99aa-9be9b73ce0a2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.412180] env[63297]: DEBUG oslo_vmware.api [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for the task: (returnval){ [ 1287.412180] env[63297]: value = "task-1697135" [ 1287.412180] env[63297]: _type = "Task" [ 1287.412180] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.423406] env[63297]: DEBUG nova.network.neutron [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Updating instance_info_cache with network_info: [{"id": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "address": "fa:16:3e:ed:e8:35", "network": {"id": "02b17772-010b-4af3-8bd2-16531a9a8b54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1224838679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20f891cd9bb546b9bfe8095234165327", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73c7c5d-39", "ovs_interfaceid": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.427427] env[63297]: DEBUG oslo_vmware.api [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697135, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.446658] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697130, 'name': Rename_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.527198] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e1837bc3-f4ec-4bb8-ac5f-f25570b4779d tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Lock "22a927ad-c2af-4814-b728-ec31b76a34d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.643s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.667645] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.748s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.668255] env[63297]: DEBUG nova.compute.manager [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1287.672128] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.984s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.674020] env[63297]: INFO nova.compute.claims [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1287.922921] env[63297]: DEBUG oslo_vmware.api [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Task: {'id': task-1697135, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160349} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.923219] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1287.924461] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1287.924461] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1287.924461] env[63297]: INFO nova.compute.manager [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1287.924461] env[63297]: DEBUG oslo.service.loopingcall [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1287.924461] env[63297]: DEBUG nova.compute.manager [-] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1287.924461] env[63297]: DEBUG nova.network.neutron [-] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1287.928636] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Releasing lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1287.928825] env[63297]: DEBUG nova.compute.manager [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Instance network_info: |[{"id": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "address": "fa:16:3e:ed:e8:35", "network": {"id": "02b17772-010b-4af3-8bd2-16531a9a8b54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1224838679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20f891cd9bb546b9bfe8095234165327", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73c7c5d-39", "ovs_interfaceid": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1287.929208] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:e8:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1287.936949] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Creating folder: Project (20f891cd9bb546b9bfe8095234165327). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1287.938039] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c07eacc1-2da3-47c1-a4ec-c9691c181c08 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.947935] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697130, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.952359] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Created folder: Project (20f891cd9bb546b9bfe8095234165327) in parent group-v353718. [ 1287.952560] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Creating folder: Instances. Parent ref: group-v353822. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1287.952805] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88f2c57f-6310-4f21-8acc-521145baf112 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.962715] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Created folder: Instances in parent group-v353822. [ 1287.962997] env[63297]: DEBUG oslo.service.loopingcall [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1287.963196] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1287.963410] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c923b66-ff39-4509-a854-8d2b7bdd8b55 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.985549] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1287.985549] env[63297]: value = "task-1697138" [ 1287.985549] env[63297]: _type = "Task" [ 1287.985549] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.994031] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697138, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.030022] env[63297]: DEBUG nova.compute.manager [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1288.097971] env[63297]: DEBUG nova.compute.manager [req-06a323a8-6eaf-49a1-b343-89e2e1d7097e req-e6e5bfce-de3c-4f99-9658-73aa85c36ec3 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Received event network-changed-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1288.098319] env[63297]: DEBUG nova.compute.manager [req-06a323a8-6eaf-49a1-b343-89e2e1d7097e req-e6e5bfce-de3c-4f99-9658-73aa85c36ec3 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Refreshing instance network info cache due to event network-changed-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1288.098495] env[63297]: DEBUG oslo_concurrency.lockutils [req-06a323a8-6eaf-49a1-b343-89e2e1d7097e req-e6e5bfce-de3c-4f99-9658-73aa85c36ec3 service nova] Acquiring lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.098675] env[63297]: DEBUG oslo_concurrency.lockutils [req-06a323a8-6eaf-49a1-b343-89e2e1d7097e req-e6e5bfce-de3c-4f99-9658-73aa85c36ec3 service nova] Acquired lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.098890] env[63297]: DEBUG nova.network.neutron [req-06a323a8-6eaf-49a1-b343-89e2e1d7097e req-e6e5bfce-de3c-4f99-9658-73aa85c36ec3 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Refreshing network info cache for port e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1288.181904] env[63297]: DEBUG nova.compute.utils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1288.186206] env[63297]: DEBUG nova.compute.manager [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1288.186206] env[63297]: DEBUG nova.network.neutron [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1288.220337] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Acquiring lock "22a927ad-c2af-4814-b728-ec31b76a34d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.220337] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Lock "22a927ad-c2af-4814-b728-ec31b76a34d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.220337] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Acquiring lock "22a927ad-c2af-4814-b728-ec31b76a34d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.220337] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Lock "22a927ad-c2af-4814-b728-ec31b76a34d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.220931] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Lock "22a927ad-c2af-4814-b728-ec31b76a34d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.223242] env[63297]: INFO nova.compute.manager [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Terminating instance [ 1288.225137] env[63297]: DEBUG nova.compute.manager [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1288.225400] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1288.226331] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a59bd0-c489-4497-ab78-806b23caf824 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.235889] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1288.236347] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ebb166c7-3ea4-4596-9296-3c105b29af99 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.244329] env[63297]: DEBUG oslo_vmware.api [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Waiting for the task: (returnval){ [ 1288.244329] env[63297]: value = "task-1697139" [ 1288.244329] env[63297]: _type = "Task" [ 1288.244329] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.253297] env[63297]: DEBUG nova.policy [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b3ec3a9fc5544ed864d57a099684bbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2deadaf96df7430aba8594c7f98facd2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1288.264303] env[63297]: DEBUG oslo_vmware.api [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697139, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.448309] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697130, 'name': Rename_Task, 'duration_secs': 1.225502} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.449022] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1288.449022] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd5b6773-201c-490d-9a3e-3c4e76dc8a91 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.456274] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Waiting for the task: (returnval){ [ 1288.456274] env[63297]: value = "task-1697140" [ 1288.456274] env[63297]: _type = "Task" [ 1288.456274] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.464436] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697140, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.497060] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697138, 'name': CreateVM_Task, 'duration_secs': 0.433295} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.497241] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1288.497916] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.498038] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.498383] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1288.498610] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc8bd5dd-352b-4a58-9edd-0e7c35b8bc43 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.503399] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1288.503399] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dd0e17-3d91-c196-6a9b-7bacb0e329af" [ 1288.503399] env[63297]: _type = "Task" [ 1288.503399] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.513744] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dd0e17-3d91-c196-6a9b-7bacb0e329af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.599373] env[63297]: DEBUG nova.network.neutron [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Successfully created port: 23a6c287-76ba-4a4d-9cfa-cd71492215be {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1288.647363] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.687192] env[63297]: DEBUG nova.compute.manager [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1288.758232] env[63297]: DEBUG nova.network.neutron [-] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.759682] env[63297]: DEBUG oslo_vmware.api [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697139, 'name': PowerOffVM_Task, 'duration_secs': 0.203992} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.759928] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1288.760108] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1288.760381] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6dee4cb0-928f-40c7-97c7-d1d172273190 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.838553] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1288.838775] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1288.838950] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Deleting the datastore file [datastore1] 22a927ad-c2af-4814-b728-ec31b76a34d4 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1288.842235] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb8705ce-537c-4c2b-ab2c-9b294fd3b179 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.850037] env[63297]: DEBUG oslo_vmware.api [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Waiting for the task: (returnval){ [ 1288.850037] env[63297]: value = "task-1697142" [ 1288.850037] env[63297]: _type = "Task" [ 1288.850037] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.859224] env[63297]: DEBUG oslo_vmware.api [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697142, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.939950] env[63297]: DEBUG nova.network.neutron [req-06a323a8-6eaf-49a1-b343-89e2e1d7097e req-e6e5bfce-de3c-4f99-9658-73aa85c36ec3 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Updated VIF entry in instance network info cache for port e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1288.940366] env[63297]: DEBUG nova.network.neutron [req-06a323a8-6eaf-49a1-b343-89e2e1d7097e req-e6e5bfce-de3c-4f99-9658-73aa85c36ec3 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Updating instance_info_cache with network_info: [{"id": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "address": "fa:16:3e:ed:e8:35", "network": {"id": "02b17772-010b-4af3-8bd2-16531a9a8b54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1224838679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20f891cd9bb546b9bfe8095234165327", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73c7c5d-39", "ovs_interfaceid": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.969607] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697140, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.016245] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dd0e17-3d91-c196-6a9b-7bacb0e329af, 'name': SearchDatastore_Task, 'duration_secs': 0.024081} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.016558] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.016788] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1289.017021] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1289.017167] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.017343] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1289.017603] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62c2866e-3770-4e38-a727-d51ef4955448 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.029030] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1289.029030] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1289.029030] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74de8b18-d886-4379-bf78-08f7b9317b0b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.032603] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1289.032603] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]521d33d2-846c-c5ac-cf26-3159985b0f66" [ 1289.032603] env[63297]: _type = "Task" [ 1289.032603] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.043319] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521d33d2-846c-c5ac-cf26-3159985b0f66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.209444] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c1a8ac-4da6-4147-9ee0-9bafddbea435 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.218031] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682ec6aa-f355-439b-8b33-f9095be9f0d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.250153] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39bffb8-35a2-4fa6-b74a-794dcd9122b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.260489] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6155dad1-d6a1-4b5f-94d0-ad495cbd39e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.273159] env[63297]: INFO nova.compute.manager [-] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Took 1.35 seconds to deallocate network for instance. [ 1289.274776] env[63297]: DEBUG nova.compute.provider_tree [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1289.360651] env[63297]: DEBUG oslo_vmware.api [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Task: {'id': task-1697142, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.254091} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.360982] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1289.361136] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1289.361357] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1289.361560] env[63297]: INFO nova.compute.manager [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1289.361812] env[63297]: DEBUG oslo.service.loopingcall [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1289.362052] env[63297]: DEBUG nova.compute.manager [-] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1289.362112] env[63297]: DEBUG nova.network.neutron [-] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1289.443475] env[63297]: DEBUG oslo_concurrency.lockutils [req-06a323a8-6eaf-49a1-b343-89e2e1d7097e req-e6e5bfce-de3c-4f99-9658-73aa85c36ec3 service nova] Releasing lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.469363] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697140, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.544399] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521d33d2-846c-c5ac-cf26-3159985b0f66, 'name': SearchDatastore_Task, 'duration_secs': 0.011062} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.545344] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d0f94e8-9b5e-4ee5-b298-be7caffefd6a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.550889] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1289.550889] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524589bf-c10f-49db-fb74-ad3a34bd4deb" [ 1289.550889] env[63297]: _type = "Task" [ 1289.550889] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.559959] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524589bf-c10f-49db-fb74-ad3a34bd4deb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.699097] env[63297]: DEBUG nova.compute.manager [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1289.728227] env[63297]: DEBUG nova.virt.hardware [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1289.728509] env[63297]: DEBUG nova.virt.hardware [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1289.728670] env[63297]: DEBUG nova.virt.hardware [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1289.728933] env[63297]: DEBUG nova.virt.hardware [None 
req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1289.729095] env[63297]: DEBUG nova.virt.hardware [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1289.729242] env[63297]: DEBUG nova.virt.hardware [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1289.729449] env[63297]: DEBUG nova.virt.hardware [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1289.729621] env[63297]: DEBUG nova.virt.hardware [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1289.729808] env[63297]: DEBUG nova.virt.hardware [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1289.729965] env[63297]: DEBUG nova.virt.hardware [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1289.730153] env[63297]: DEBUG nova.virt.hardware [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1289.731035] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415e3594-08b3-4621-8bc3-b855662ebb98 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.741291] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e98c0e5-d9b6-4205-b954-1aeca4b56026 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.778859] env[63297]: DEBUG nova.scheduler.client.report [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1289.783383] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.968703] env[63297]: DEBUG oslo_vmware.api [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697140, 'name': PowerOnVM_Task, 'duration_secs': 1.35435} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.969151] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1289.969476] env[63297]: INFO nova.compute.manager [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Took 10.24 seconds to spawn the instance on the hypervisor. [ 1289.969753] env[63297]: DEBUG nova.compute.manager [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1289.970642] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2870e258-e4c2-49ae-b838-c37d217cd7ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.062244] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524589bf-c10f-49db-fb74-ad3a34bd4deb, 'name': SearchDatastore_Task, 'duration_secs': 0.011534} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.062554] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1290.062779] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b65e8c04-df55-491e-861c-8aa6def8c9be/b65e8c04-df55-491e-861c-8aa6def8c9be.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1290.063058] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49d7c948-5e55-4e77-9320-0396d0532a69 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.070130] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1290.070130] env[63297]: value = "task-1697143" [ 1290.070130] env[63297]: _type = "Task" [ 1290.070130] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.079675] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697143, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.118872] env[63297]: DEBUG nova.network.neutron [-] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.176258] env[63297]: DEBUG nova.compute.manager [req-8fcf656d-cf35-453f-847e-17de211e812a req-8958018b-c81f-445e-aad6-993c007ad958 service nova] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Received event network-vif-deleted-fab4c7d4-707f-4b4d-81ff-d6796a6fd27a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1290.176258] env[63297]: DEBUG nova.compute.manager [req-8fcf656d-cf35-453f-847e-17de211e812a req-8958018b-c81f-445e-aad6-993c007ad958 service nova] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Received event network-vif-deleted-9e58a5c1-8922-4504-a29b-1b53dd0ff360 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1290.285222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.286059] env[63297]: DEBUG nova.compute.manager [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1290.289193] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.076s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.289415] env[63297]: DEBUG nova.objects.instance [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Lazy-loading 'resources' on Instance uuid 86a0579f-211c-42bc-925a-e30aaca4e0f5 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1290.319365] env[63297]: DEBUG nova.compute.manager [req-1c11b235-c178-444b-8eae-0661077a7fe7 req-8807cad9-eeea-40dc-ba26-9692967aa1eb service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Received event network-vif-plugged-23a6c287-76ba-4a4d-9cfa-cd71492215be {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1290.319693] env[63297]: DEBUG oslo_concurrency.lockutils [req-1c11b235-c178-444b-8eae-0661077a7fe7 req-8807cad9-eeea-40dc-ba26-9692967aa1eb service nova] Acquiring lock "b853b581-ea46-4455-8cdb-6ea2f31c22be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.319946] env[63297]: DEBUG oslo_concurrency.lockutils [req-1c11b235-c178-444b-8eae-0661077a7fe7 req-8807cad9-eeea-40dc-ba26-9692967aa1eb service nova] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.320141] env[63297]: DEBUG oslo_concurrency.lockutils [req-1c11b235-c178-444b-8eae-0661077a7fe7 req-8807cad9-eeea-40dc-ba26-9692967aa1eb service nova] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.320300] env[63297]: DEBUG nova.compute.manager [req-1c11b235-c178-444b-8eae-0661077a7fe7 req-8807cad9-eeea-40dc-ba26-9692967aa1eb service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] No waiting events found dispatching network-vif-plugged-23a6c287-76ba-4a4d-9cfa-cd71492215be {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1290.320589] env[63297]: WARNING nova.compute.manager [req-1c11b235-c178-444b-8eae-0661077a7fe7 req-8807cad9-eeea-40dc-ba26-9692967aa1eb service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Received unexpected event network-vif-plugged-23a6c287-76ba-4a4d-9cfa-cd71492215be for instance with vm_state building and task_state spawning. 
[ 1290.438616] env[63297]: DEBUG nova.network.neutron [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Successfully updated port: 23a6c287-76ba-4a4d-9cfa-cd71492215be {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1290.491727] env[63297]: INFO nova.compute.manager [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Took 42.40 seconds to build instance. [ 1290.581359] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697143, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.621254] env[63297]: INFO nova.compute.manager [-] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Took 1.26 seconds to deallocate network for instance. [ 1290.796850] env[63297]: DEBUG nova.compute.utils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1290.798970] env[63297]: DEBUG nova.compute.manager [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1290.799092] env[63297]: DEBUG nova.network.neutron [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1290.881515] env[63297]: DEBUG nova.policy [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c54dc9a58e546038e03377176e94a85', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd688e8df7eb848858ff5dc4203ee0550', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1290.944689] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "refresh_cache-b853b581-ea46-4455-8cdb-6ea2f31c22be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1290.944689] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquired lock "refresh_cache-b853b581-ea46-4455-8cdb-6ea2f31c22be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.944689] env[63297]: DEBUG nova.network.neutron [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1290.994622] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e3481ae4-b1e0-4a7d-9f33-cd2b848f7408 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Lock "862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.132s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.081714] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697143, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.704888} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.082013] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b65e8c04-df55-491e-861c-8aa6def8c9be/b65e8c04-df55-491e-861c-8aa6def8c9be.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1291.082734] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1291.082867] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2594a2ef-4d9b-4213-b65e-933209b82841 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.091325] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1291.091325] env[63297]: value = "task-1697144" [ 1291.091325] env[63297]: _type = "Task" [ 1291.091325] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.099877] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697144, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.131020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.301959] env[63297]: DEBUG nova.compute.manager [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1291.312184] env[63297]: DEBUG nova.network.neutron [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Successfully created port: 5c09f690-4a73-47ed-860e-be77651b0206 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1291.426168] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfaede6-3691-4ed0-a77b-dfeb1c1cd9af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.439235] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611288f7-4462-4a1e-ad04-360e2ab892dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.484059] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd6ee43-4fde-4af3-8b42-4a30d658a856 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.494660] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb691f17-3c94-444f-9bfb-79c24b2e528e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.499934] env[63297]: DEBUG nova.compute.manager [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1291.514180] env[63297]: DEBUG nova.compute.provider_tree [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1291.540019] env[63297]: DEBUG nova.network.neutron [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1291.603833] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697144, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06944} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.604312] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1291.605185] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb04c73e-23bf-484e-afa3-624860b3993e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.633892] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] b65e8c04-df55-491e-861c-8aa6def8c9be/b65e8c04-df55-491e-861c-8aa6def8c9be.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1291.634266] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4681d470-173b-4ee6-a5f7-fbb9114905fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.660313] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1291.660313] env[63297]: value = "task-1697145" [ 1291.660313] env[63297]: _type = "Task" [ 1291.660313] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.671187] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697145, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.021040] env[63297]: DEBUG nova.network.neutron [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Updating instance_info_cache with network_info: [{"id": "23a6c287-76ba-4a4d-9cfa-cd71492215be", "address": "fa:16:3e:57:5f:ca", "network": {"id": "43d5b50a-9475-40f0-ac20-45a93157aa57", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-64163963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2deadaf96df7430aba8594c7f98facd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23a6c287-76", "ovs_interfaceid": "23a6c287-76ba-4a4d-9cfa-cd71492215be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.023384] env[63297]: DEBUG nova.scheduler.client.report [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1292.028192] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.171332] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697145, 'name': ReconfigVM_Task, 'duration_secs': 0.292199} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.172017] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Reconfigured VM instance instance-00000020 to attach disk [datastore1] b65e8c04-df55-491e-861c-8aa6def8c9be/b65e8c04-df55-491e-861c-8aa6def8c9be.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1292.172722] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ca8b34d-4939-4676-8842-b0283e7ebe08 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.181684] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1292.181684] env[63297]: value = "task-1697146" [ 1292.181684] env[63297]: _type = "Task" [ 1292.181684] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.189497] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697146, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.316755] env[63297]: DEBUG nova.compute.manager [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1292.351560] env[63297]: DEBUG nova.virt.hardware [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1292.351875] env[63297]: DEBUG nova.virt.hardware [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1292.351997] env[63297]: DEBUG nova.virt.hardware [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1292.352194] env[63297]: DEBUG nova.virt.hardware [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1292.352429] env[63297]: DEBUG nova.virt.hardware [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1292.352637] env[63297]: DEBUG nova.virt.hardware [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1292.352862] env[63297]: DEBUG nova.virt.hardware [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1292.353354] env[63297]: DEBUG nova.virt.hardware [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1292.353587] env[63297]: DEBUG 
nova.virt.hardware [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1292.353809] env[63297]: DEBUG nova.virt.hardware [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1292.354096] env[63297]: DEBUG nova.virt.hardware [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1292.356347] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab84405-18bf-42da-8306-10501e4a6bf6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.365596] env[63297]: DEBUG oslo_vmware.rw_handles [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5231c4d9-21e6-e464-94ae-e5a23cc3c8a1/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1292.366894] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de3bf6a-ea5b-4264-a4ff-eff72ce6629a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.373832] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35f2a45-42f6-4850-bb89-d61edf896a08 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.380020] env[63297]: DEBUG oslo_vmware.rw_handles [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5231c4d9-21e6-e464-94ae-e5a23cc3c8a1/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1292.380204] env[63297]: ERROR oslo_vmware.rw_handles [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5231c4d9-21e6-e464-94ae-e5a23cc3c8a1/disk-0.vmdk due to incomplete transfer. 
[ 1292.388066] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-73fb4874-b31d-46bc-a5c5-d1d0920074f5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.396104] env[63297]: DEBUG oslo_vmware.rw_handles [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5231c4d9-21e6-e464-94ae-e5a23cc3c8a1/disk-0.vmdk. {{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1292.396273] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Uploaded image 861a16f6-8868-469a-b39d-c225f8b1c8b3 to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1292.398787] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1292.399063] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9ef62122-c894-469d-a004-98c3fdcac2a1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.404926] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1292.404926] env[63297]: value = "task-1697148" [ 1292.404926] env[63297]: _type = "Task" [ 1292.404926] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.414195] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697148, 'name': Destroy_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.530340] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Releasing lock "refresh_cache-b853b581-ea46-4455-8cdb-6ea2f31c22be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1292.530909] env[63297]: DEBUG nova.compute.manager [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Instance network_info: |[{"id": "23a6c287-76ba-4a4d-9cfa-cd71492215be", "address": "fa:16:3e:57:5f:ca", "network": {"id": "43d5b50a-9475-40f0-ac20-45a93157aa57", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-64163963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2deadaf96df7430aba8594c7f98facd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23a6c287-76", "ovs_interfaceid": "23a6c287-76ba-4a4d-9cfa-cd71492215be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1292.532234] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.243s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.535650] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:5f:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23a6c287-76ba-4a4d-9cfa-cd71492215be', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1292.544598] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Creating folder: Project (2deadaf96df7430aba8594c7f98facd2). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1292.544598] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.887s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.545626] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4bc16c5-de2e-4329-8b33-13cc8a34a84f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.556192] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Created folder: Project (2deadaf96df7430aba8594c7f98facd2) in parent group-v353718. [ 1292.556192] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Creating folder: Instances. Parent ref: group-v353825. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1292.556563] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b971c5d-d874-45af-8022-5b132f96831a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.559641] env[63297]: INFO nova.scheduler.client.report [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Deleted allocations for instance 86a0579f-211c-42bc-925a-e30aaca4e0f5 [ 1292.571040] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Created folder: Instances in parent group-v353825. [ 1292.571040] env[63297]: DEBUG oslo.service.loopingcall [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1292.571040] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1292.571040] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4589ed8c-3926-40cc-a22c-08cadea4911c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.592995] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1292.592995] env[63297]: value = "task-1697151" [ 1292.592995] env[63297]: _type = "Task" [ 1292.592995] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.605887] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697151, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.667995] env[63297]: DEBUG nova.compute.manager [req-3ab77aa3-6ad9-42c0-9b6b-ca1ff51c9ab6 req-7510c31d-f8e0-42d9-b3a6-11063e862456 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Received event network-changed-7afec2db-71a8-42c9-aabb-988b70a71ede {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1292.667995] env[63297]: DEBUG nova.compute.manager [req-3ab77aa3-6ad9-42c0-9b6b-ca1ff51c9ab6 req-7510c31d-f8e0-42d9-b3a6-11063e862456 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Refreshing instance network info cache due to event network-changed-7afec2db-71a8-42c9-aabb-988b70a71ede. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1292.668261] env[63297]: DEBUG oslo_concurrency.lockutils [req-3ab77aa3-6ad9-42c0-9b6b-ca1ff51c9ab6 req-7510c31d-f8e0-42d9-b3a6-11063e862456 service nova] Acquiring lock "refresh_cache-862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1292.668328] env[63297]: DEBUG oslo_concurrency.lockutils [req-3ab77aa3-6ad9-42c0-9b6b-ca1ff51c9ab6 req-7510c31d-f8e0-42d9-b3a6-11063e862456 service nova] Acquired lock "refresh_cache-862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.668554] env[63297]: DEBUG nova.network.neutron [req-3ab77aa3-6ad9-42c0-9b6b-ca1ff51c9ab6 req-7510c31d-f8e0-42d9-b3a6-11063e862456 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Refreshing network info cache for port 7afec2db-71a8-42c9-aabb-988b70a71ede {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1292.692214] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697146, 'name': Rename_Task, 'duration_secs': 0.177473} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.692506] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1292.692808] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc83217c-043c-492b-bede-74a57c24052c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.701555] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1292.701555] env[63297]: value = "task-1697152" [ 1292.701555] env[63297]: _type = "Task" [ 1292.701555] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.710289] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697152, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.742581] env[63297]: DEBUG nova.compute.manager [req-433bb02c-7bec-4dd8-a0e0-4df72606a2a6 req-6f26a8d4-6e53-480b-aeab-9ec915c11a98 service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Received event network-changed-23a6c287-76ba-4a4d-9cfa-cd71492215be {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1292.742581] env[63297]: DEBUG nova.compute.manager [req-433bb02c-7bec-4dd8-a0e0-4df72606a2a6 req-6f26a8d4-6e53-480b-aeab-9ec915c11a98 service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Refreshing instance network info cache due to event network-changed-23a6c287-76ba-4a4d-9cfa-cd71492215be. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1292.742786] env[63297]: DEBUG oslo_concurrency.lockutils [req-433bb02c-7bec-4dd8-a0e0-4df72606a2a6 req-6f26a8d4-6e53-480b-aeab-9ec915c11a98 service nova] Acquiring lock "refresh_cache-b853b581-ea46-4455-8cdb-6ea2f31c22be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1292.742999] env[63297]: DEBUG oslo_concurrency.lockutils [req-433bb02c-7bec-4dd8-a0e0-4df72606a2a6 req-6f26a8d4-6e53-480b-aeab-9ec915c11a98 service nova] Acquired lock "refresh_cache-b853b581-ea46-4455-8cdb-6ea2f31c22be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.743499] env[63297]: DEBUG nova.network.neutron [req-433bb02c-7bec-4dd8-a0e0-4df72606a2a6 req-6f26a8d4-6e53-480b-aeab-9ec915c11a98 service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Refreshing network info cache for port 23a6c287-76ba-4a4d-9cfa-cd71492215be {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1292.914785] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697148, 'name': Destroy_Task, 'duration_secs': 0.440796} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.915144] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Destroyed the VM [ 1292.915445] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1292.915744] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e3b16ef3-8437-4218-8bc7-f1e7eb4e89fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.922605] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1292.922605] env[63297]: value = "task-1697153" [ 1292.922605] env[63297]: _type = "Task" [ 1292.922605] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.936889] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697153, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.068646] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b24436-e6ba-437f-9c39-0188d9d36e9d tempest-ServersAaction247Test-1431704946 tempest-ServersAaction247Test-1431704946-project-member] Lock "86a0579f-211c-42bc-925a-e30aaca4e0f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.640s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.103009] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697151, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.213094] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697152, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.390231] env[63297]: DEBUG nova.network.neutron [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Successfully updated port: 5c09f690-4a73-47ed-860e-be77651b0206 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1293.433049] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697153, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.561419] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Applying migration context for instance 9b1306f9-4b0a-4116-8e79-271478f33490 as it has an incoming, in-progress migration 4720dc5e-30b9-4f95-8509-ba35c8eb57ef. Migration status is confirming {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1293.563611] env[63297]: INFO nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating resource usage from migration 4720dc5e-30b9-4f95-8509-ba35c8eb57ef [ 1293.599022] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 81920a24-f406-4923-98b7-cc0f3d0ccc8b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1293.599180] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance ef851d71-788d-42f8-a824-5d30a89e957b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.599299] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance ef57101e-1d8a-4ad5-ad68-cad2dbea33d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.599441] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b5d34058-fa3e-4806-97e5-638bbbffaeb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.599613] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 35c68986-51b5-43ba-a076-aca3c86d68bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.599729] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 4438e230-0589-48ae-8848-d1f8414efa61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.599858] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 746742ac-8d7a-466b-8bc0-043cb5422111 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1293.599976] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance eff06e8a-8341-4d5e-b6dd-a585be4a21ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.600128] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 754e64ec-b6fa-49d8-9de6-ef38918378fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.600239] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b14e8466-68ab-4705-a439-6db961a149b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.600348] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 14b4518e-044a-451a-845d-fa3742e5b3e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.600456] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Migration 4720dc5e-30b9-4f95-8509-ba35c8eb57ef is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1293.600560] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 9b1306f9-4b0a-4116-8e79-271478f33490 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.600679] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 8c10c573-de56-4c72-959a-65bf53b805a5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1293.600796] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1293.600903] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance eebcad60-4b8a-4fa0-b846-b65972c4c69c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.601017] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 92439795-6240-4103-940b-de6d87738570 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.601138] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 87fa97a7-a8a5-4184-b52a-b02ad5468127 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1293.601252] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 22a927ad-c2af-4814-b728-ec31b76a34d4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1293.601362] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.601468] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b65e8c04-df55-491e-861c-8aa6def8c9be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.601570] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b853b581-ea46-4455-8cdb-6ea2f31c22be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.601705] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 581f9d48-dcb8-4a34-928b-64087a9f966b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.607020] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697151, 'name': CreateVM_Task, 'duration_secs': 0.815386} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.609704] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1293.610414] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1293.610616] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.610937] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1293.611487] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fde7751-0463-4ab4-a925-80e85f290d08 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.616105] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1293.616105] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52344135-482c-743e-cd2d-91baacfc3091" [ 1293.616105] env[63297]: _type = "Task" [ 1293.616105] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.624780] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52344135-482c-743e-cd2d-91baacfc3091, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.643463] env[63297]: DEBUG nova.network.neutron [req-3ab77aa3-6ad9-42c0-9b6b-ca1ff51c9ab6 req-7510c31d-f8e0-42d9-b3a6-11063e862456 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Updated VIF entry in instance network info cache for port 7afec2db-71a8-42c9-aabb-988b70a71ede. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1293.643831] env[63297]: DEBUG nova.network.neutron [req-3ab77aa3-6ad9-42c0-9b6b-ca1ff51c9ab6 req-7510c31d-f8e0-42d9-b3a6-11063e862456 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Updating instance_info_cache with network_info: [{"id": "7afec2db-71a8-42c9-aabb-988b70a71ede", "address": "fa:16:3e:49:e6:3e", "network": {"id": "c5f8653d-b207-4631-8f20-252c17df44b3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2063892400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a056927390f4ff2a0305bd7e8ad5f3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afec2db-71", "ovs_interfaceid": "7afec2db-71a8-42c9-aabb-988b70a71ede", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.669421] env[63297]: DEBUG nova.network.neutron [req-433bb02c-7bec-4dd8-a0e0-4df72606a2a6 req-6f26a8d4-6e53-480b-aeab-9ec915c11a98 service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Updated VIF entry in instance network info cache for port 23a6c287-76ba-4a4d-9cfa-cd71492215be. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1293.670072] env[63297]: DEBUG nova.network.neutron [req-433bb02c-7bec-4dd8-a0e0-4df72606a2a6 req-6f26a8d4-6e53-480b-aeab-9ec915c11a98 service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Updating instance_info_cache with network_info: [{"id": "23a6c287-76ba-4a4d-9cfa-cd71492215be", "address": "fa:16:3e:57:5f:ca", "network": {"id": "43d5b50a-9475-40f0-ac20-45a93157aa57", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-64163963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2deadaf96df7430aba8594c7f98facd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23a6c287-76", "ovs_interfaceid": "23a6c287-76ba-4a4d-9cfa-cd71492215be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.712273] env[63297]: DEBUG oslo_vmware.api [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697152, 'name': PowerOnVM_Task, 'duration_secs': 0.535824} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.712544] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1293.712748] env[63297]: INFO nova.compute.manager [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Took 9.02 seconds to spawn the instance on the hypervisor. 
[ 1293.712927] env[63297]: DEBUG nova.compute.manager [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1293.713696] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e26360-d338-418c-9942-11a4187e5b50 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.893244] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquiring lock "refresh_cache-581f9d48-dcb8-4a34-928b-64087a9f966b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1293.893415] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquired lock "refresh_cache-581f9d48-dcb8-4a34-928b-64087a9f966b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.893570] env[63297]: DEBUG nova.network.neutron [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1293.933741] env[63297]: DEBUG oslo_vmware.api [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697153, 'name': RemoveSnapshot_Task, 'duration_secs': 0.764977} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.933988] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1293.934336] env[63297]: INFO nova.compute.manager [None req-73735adf-57a9-4ba3-b64a-63f9b872911b tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Took 14.06 seconds to snapshot the instance on the hypervisor. [ 1294.110588] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance fba9040d-f904-44a1-8785-14d4696ea939 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1294.128413] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52344135-482c-743e-cd2d-91baacfc3091, 'name': SearchDatastore_Task, 'duration_secs': 0.014598} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.129152] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.129152] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1294.129287] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1294.129386] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.129535] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1294.129799] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b20a4c25-df03-4792-95fb-068567ca0dd3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.140352] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1294.140527] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1294.141318] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0883405-113e-4534-b57f-1bccb69c1f0f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.147379] env[63297]: DEBUG oslo_concurrency.lockutils [req-3ab77aa3-6ad9-42c0-9b6b-ca1ff51c9ab6 req-7510c31d-f8e0-42d9-b3a6-11063e862456 service nova] Releasing lock "refresh_cache-862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.147534] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1294.147534] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5271c28a-d410-c3ae-61f2-bceb5c39d3ee" [ 1294.147534] env[63297]: _type = "Task" [ 1294.147534] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.155023] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5271c28a-d410-c3ae-61f2-bceb5c39d3ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.173168] env[63297]: DEBUG oslo_concurrency.lockutils [req-433bb02c-7bec-4dd8-a0e0-4df72606a2a6 req-6f26a8d4-6e53-480b-aeab-9ec915c11a98 service nova] Releasing lock "refresh_cache-b853b581-ea46-4455-8cdb-6ea2f31c22be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.232884] env[63297]: INFO nova.compute.manager [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Took 44.32 seconds to build instance. [ 1294.439717] env[63297]: DEBUG nova.network.neutron [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1294.603309] env[63297]: DEBUG nova.network.neutron [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Updating instance_info_cache with network_info: [{"id": "5c09f690-4a73-47ed-860e-be77651b0206", "address": "fa:16:3e:69:eb:fb", "network": {"id": "22aea27d-d369-450e-9765-2000279770bc", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1350097298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d688e8df7eb848858ff5dc4203ee0550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c09f690-4a", "ovs_interfaceid": "5c09f690-4a73-47ed-860e-be77651b0206", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.613663] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1294.658218] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5271c28a-d410-c3ae-61f2-bceb5c39d3ee, 'name': SearchDatastore_Task, 'duration_secs': 0.05431} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.659202] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7498c97-f478-475c-b991-151bb26c6bcb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.665009] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1294.665009] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5228026e-7965-47cc-48f6-13908f7c3337" [ 1294.665009] env[63297]: _type = "Task" [ 1294.665009] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.672910] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5228026e-7965-47cc-48f6-13908f7c3337, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.738969] env[63297]: DEBUG oslo_concurrency.lockutils [None req-88e1796a-efed-44ef-a7c4-3fefce60d4ca tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "b65e8c04-df55-491e-861c-8aa6def8c9be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.559s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.105979] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Releasing lock "refresh_cache-581f9d48-dcb8-4a34-928b-64087a9f966b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1295.106588] env[63297]: DEBUG nova.compute.manager [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Instance network_info: |[{"id": "5c09f690-4a73-47ed-860e-be77651b0206", "address": "fa:16:3e:69:eb:fb", "network": {"id": "22aea27d-d369-450e-9765-2000279770bc", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1350097298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d688e8df7eb848858ff5dc4203ee0550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c09f690-4a", "ovs_interfaceid": "5c09f690-4a73-47ed-860e-be77651b0206", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1295.107146] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:eb:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c09f690-4a73-47ed-860e-be77651b0206', 'vif_model': 'vmxnet3'}] 
{{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1295.116479] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Creating folder: Project (d688e8df7eb848858ff5dc4203ee0550). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1295.117816] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 8bc4bb67-bc00-44c6-9c83-c0a1072142e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1295.120524] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9c3b4c0-5e29-4035-8edd-460bad9ff4c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.131794] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Created folder: Project (d688e8df7eb848858ff5dc4203ee0550) in parent group-v353718. [ 1295.132515] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Creating folder: Instances. Parent ref: group-v353828. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1295.133456] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37a4a99b-1b93-46ff-9a5c-bf16d23e5be5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.142446] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Created folder: Instances in parent group-v353828. [ 1295.142912] env[63297]: DEBUG oslo.service.loopingcall [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1295.143705] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1295.143969] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6824d906-6e77-42d7-870f-c05c8712cd7f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.167274] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1295.167274] env[63297]: value = "task-1697157" [ 1295.167274] env[63297]: _type = "Task" [ 1295.167274] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.180166] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5228026e-7965-47cc-48f6-13908f7c3337, 'name': SearchDatastore_Task, 'duration_secs': 0.009805} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.186758] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1295.186758] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b853b581-ea46-4455-8cdb-6ea2f31c22be/b853b581-ea46-4455-8cdb-6ea2f31c22be.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1295.186758] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697157, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.186758] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4402d9ac-1b84-4750-a0a9-90b7ba162cfb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.195543] env[63297]: DEBUG nova.compute.manager [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Received event network-vif-plugged-5c09f690-4a73-47ed-860e-be77651b0206 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1295.195543] env[63297]: DEBUG oslo_concurrency.lockutils [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] Acquiring lock "581f9d48-dcb8-4a34-928b-64087a9f966b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.195673] env[63297]: DEBUG oslo_concurrency.lockutils [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] Lock "581f9d48-dcb8-4a34-928b-64087a9f966b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.195788] env[63297]: DEBUG oslo_concurrency.lockutils [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] Lock "581f9d48-dcb8-4a34-928b-64087a9f966b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.196030] env[63297]: DEBUG nova.compute.manager [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] No waiting events found dispatching network-vif-plugged-5c09f690-4a73-47ed-860e-be77651b0206 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1295.196115] env[63297]: WARNING nova.compute.manager [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Received unexpected event network-vif-plugged-5c09f690-4a73-47ed-860e-be77651b0206 for instance with vm_state building and task_state spawning. [ 1295.196295] env[63297]: DEBUG nova.compute.manager [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Received event network-changed-5c09f690-4a73-47ed-860e-be77651b0206 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1295.196414] env[63297]: DEBUG nova.compute.manager [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Refreshing instance network info cache due to event network-changed-5c09f690-4a73-47ed-860e-be77651b0206. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1295.196597] env[63297]: DEBUG oslo_concurrency.lockutils [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] Acquiring lock "refresh_cache-581f9d48-dcb8-4a34-928b-64087a9f966b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1295.196732] env[63297]: DEBUG oslo_concurrency.lockutils [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] Acquired lock "refresh_cache-581f9d48-dcb8-4a34-928b-64087a9f966b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.196884] env[63297]: DEBUG nova.network.neutron [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Refreshing network info cache for port 5c09f690-4a73-47ed-860e-be77651b0206 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1295.200609] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1295.200609] env[63297]: value = "task-1697158" [ 1295.200609] env[63297]: _type = "Task" [ 1295.200609] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.210992] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697158, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.244552] env[63297]: DEBUG nova.compute.manager [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1295.624347] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 2d7b237e-f86d-42b1-ab04-320f0012a2d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1295.682402] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697157, 'name': CreateVM_Task, 'duration_secs': 0.393828} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.682651] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1295.683371] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1295.683536] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.683912] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1295.684211] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-521d9fc2-a0a7-40a8-8cf0-a9c579c5a876 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.690685] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Waiting for the task: (returnval){ [ 1295.690685] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dadc6a-d067-1ff1-3dfe-a12cff10f5ce" [ 1295.690685] env[63297]: _type = "Task" [ 1295.690685] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.701482] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dadc6a-d067-1ff1-3dfe-a12cff10f5ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.714143] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697158, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.769020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.127858] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 8fa5fef6-8768-4e24-aab3-db56a10588c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1296.201554] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dadc6a-d067-1ff1-3dfe-a12cff10f5ce, 'name': SearchDatastore_Task, 'duration_secs': 0.053982} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.201891] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.202169] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1296.202407] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1296.202555] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.202734] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1296.203637] env[63297]: DEBUG nova.network.neutron [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Updated VIF entry in instance network info cache for port 5c09f690-4a73-47ed-860e-be77651b0206. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1296.203973] env[63297]: DEBUG nova.network.neutron [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Updating instance_info_cache with network_info: [{"id": "5c09f690-4a73-47ed-860e-be77651b0206", "address": "fa:16:3e:69:eb:fb", "network": {"id": "22aea27d-d369-450e-9765-2000279770bc", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1350097298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d688e8df7eb848858ff5dc4203ee0550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c09f690-4a", "ovs_interfaceid": "5c09f690-4a73-47ed-860e-be77651b0206", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.205591] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8705a35b-6d96-423e-9529-a33fb09f6574 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.217819] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697158, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.740436} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.219659] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b853b581-ea46-4455-8cdb-6ea2f31c22be/b853b581-ea46-4455-8cdb-6ea2f31c22be.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1296.219910] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1296.220203] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1296.220361] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1296.221061] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c1e854d-3f3b-490f-9c96-73132825c80f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.223170] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0afaf97e-c447-4c36-add4-7e465531ce29 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.229695] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Waiting for the task: (returnval){ [ 1296.229695] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522d094c-78cd-d587-197d-7821dd5dd78d" [ 1296.229695] env[63297]: _type = "Task" [ 1296.229695] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.234411] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1296.234411] env[63297]: value = "task-1697159" [ 1296.234411] env[63297]: _type = "Task" [ 1296.234411] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.241243] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522d094c-78cd-d587-197d-7821dd5dd78d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.245649] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697159, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.631269] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance d15a7e98-755b-4c5c-ba34-dc5fc3f8846d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1296.712829] env[63297]: DEBUG oslo_concurrency.lockutils [req-433c123b-a183-4419-81f2-92942d7d27e2 req-c5da3610-a1c2-4463-94a7-29a5dcc34fc0 service nova] Releasing lock "refresh_cache-581f9d48-dcb8-4a34-928b-64087a9f966b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.743795] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522d094c-78cd-d587-197d-7821dd5dd78d, 'name': SearchDatastore_Task, 'duration_secs': 0.016322} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.745286] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-253daaed-9879-4a5d-a5b9-93346bf1c65d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.750283] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697159, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06717} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.750913] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1296.751867] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82db1f88-3e02-486e-b984-7b75c64ad993 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.755823] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Waiting for the task: (returnval){ [ 1296.755823] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d7f8eb-aab1-93d0-379a-1ef0d0da04ba" [ 1296.755823] env[63297]: _type = "Task" [ 1296.755823] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.778337] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] b853b581-ea46-4455-8cdb-6ea2f31c22be/b853b581-ea46-4455-8cdb-6ea2f31c22be.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1296.779458] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0fdba0d-e521-41d4-b5d1-082aa27c5ab7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.797718] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d7f8eb-aab1-93d0-379a-1ef0d0da04ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.802849] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1296.802849] env[63297]: value = "task-1697161" [ 1296.802849] env[63297]: _type = "Task" [ 1296.802849] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.811491] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697161, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.136800] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance f429dd9b-be6c-4e90-876b-3a3931fb1c4a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1297.268080] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d7f8eb-aab1-93d0-379a-1ef0d0da04ba, 'name': SearchDatastore_Task, 'duration_secs': 0.037252} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.270024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.270024] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 581f9d48-dcb8-4a34-928b-64087a9f966b/581f9d48-dcb8-4a34-928b-64087a9f966b.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1297.270024] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41ef00b5-34eb-4968-a542-3111ef04fc5a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.277363] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Waiting for the task: (returnval){ [ 1297.277363] env[63297]: value = "task-1697162" [ 1297.277363] env[63297]: _type = "Task" [ 1297.277363] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.287665] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697162, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.316428] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697161, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.350871] env[63297]: DEBUG nova.compute.manager [req-3e99eb17-fb73-4156-b0ff-4f55f4daafcd req-99005eb1-990d-4d34-8cb3-fe44c33e32e9 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Received event network-changed-7afec2db-71a8-42c9-aabb-988b70a71ede {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1297.350871] env[63297]: DEBUG nova.compute.manager [req-3e99eb17-fb73-4156-b0ff-4f55f4daafcd req-99005eb1-990d-4d34-8cb3-fe44c33e32e9 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Refreshing instance network info cache due to event network-changed-7afec2db-71a8-42c9-aabb-988b70a71ede. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1297.350871] env[63297]: DEBUG oslo_concurrency.lockutils [req-3e99eb17-fb73-4156-b0ff-4f55f4daafcd req-99005eb1-990d-4d34-8cb3-fe44c33e32e9 service nova] Acquiring lock "refresh_cache-862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1297.350871] env[63297]: DEBUG oslo_concurrency.lockutils [req-3e99eb17-fb73-4156-b0ff-4f55f4daafcd req-99005eb1-990d-4d34-8cb3-fe44c33e32e9 service nova] Acquired lock "refresh_cache-862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.351220] env[63297]: DEBUG nova.network.neutron [req-3e99eb17-fb73-4156-b0ff-4f55f4daafcd req-99005eb1-990d-4d34-8cb3-fe44c33e32e9 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Refreshing network info cache for port 7afec2db-71a8-42c9-aabb-988b70a71ede {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1297.639315] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 5e158880-81a6-4d35-b1df-6fd59ba4a8ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1297.691368] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Acquiring lock "862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.692794] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Lock "862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.692794] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Acquiring lock "862302e5-ad7e-40f3-a4a3-8c4a8035e1cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.692794] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Lock "862302e5-ad7e-40f3-a4a3-8c4a8035e1cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.692794] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Lock "862302e5-ad7e-40f3-a4a3-8c4a8035e1cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.697551] env[63297]: INFO nova.compute.manager [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Terminating instance [ 1297.700787] env[63297]: DEBUG nova.compute.manager [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1297.701394] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1297.703028] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd739e54-34c1-4d69-8204-18e9e802233f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.713477] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1297.713477] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08bd1f3b-8520-4b84-aa9b-4753923ffad6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.722877] env[63297]: DEBUG oslo_vmware.api [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Waiting for the task: (returnval){ [ 1297.722877] env[63297]: value = "task-1697163" [ 1297.722877] env[63297]: _type = "Task" [ 1297.722877] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.737063] env[63297]: DEBUG oslo_vmware.api [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697163, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.789011] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697162, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.816360] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "272180b9-e79b-4714-b28b-470937509f42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.816592] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "272180b9-e79b-4714-b28b-470937509f42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.824842] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697161, 'name': ReconfigVM_Task, 'duration_secs': 0.758542} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.825152] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Reconfigured VM instance instance-00000021 to attach disk [datastore1] b853b581-ea46-4455-8cdb-6ea2f31c22be/b853b581-ea46-4455-8cdb-6ea2f31c22be.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1297.825804] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa1237d0-0751-4e99-a110-53f943fcd9e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.838265] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1297.838265] env[63297]: value = "task-1697164" [ 1297.838265] env[63297]: _type = "Task" [ 1297.838265] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.848305] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697164, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.142879] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 99cc8af3-5c18-4839-94db-996861e0c276 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1298.237188] env[63297]: DEBUG oslo_vmware.api [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697163, 'name': PowerOffVM_Task, 'duration_secs': 0.316157} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.238055] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1298.238889] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1298.241682] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bcd606c5-94bf-43cb-bb79-edefb0f86bb6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.287200] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697162, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626417} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.287462] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 581f9d48-dcb8-4a34-928b-64087a9f966b/581f9d48-dcb8-4a34-928b-64087a9f966b.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1298.287670] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1298.288642] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87b1efd1-ee9d-4615-a48f-d3081d461cc7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.296496] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Waiting for the task: (returnval){ [ 1298.296496] env[63297]: value = "task-1697166" [ 1298.296496] env[63297]: _type = "Task" [ 1298.296496] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.304563] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697166, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.330716] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1298.330958] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1298.331151] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Deleting the datastore file [datastore1] 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1298.331411] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-782b3c35-da6d-4b03-9a71-a7ccdc1226b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.338471] env[63297]: DEBUG oslo_vmware.api [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Waiting for the task: (returnval){ [ 1298.338471] env[63297]: value = "task-1697167" [ 1298.338471] env[63297]: _type = "Task" [ 1298.338471] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.354247] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697164, 'name': Rename_Task, 'duration_secs': 0.177299} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.354479] env[63297]: DEBUG oslo_vmware.api [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697167, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.354720] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1298.354946] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8308d14f-35dc-4a55-9361-c375d5543ed6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.361405] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1298.361405] env[63297]: value = "task-1697168" [ 1298.361405] env[63297]: _type = "Task" [ 1298.361405] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.370171] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697168, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.414129] env[63297]: DEBUG nova.network.neutron [req-3e99eb17-fb73-4156-b0ff-4f55f4daafcd req-99005eb1-990d-4d34-8cb3-fe44c33e32e9 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Updated VIF entry in instance network info cache for port 7afec2db-71a8-42c9-aabb-988b70a71ede. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1298.414494] env[63297]: DEBUG nova.network.neutron [req-3e99eb17-fb73-4156-b0ff-4f55f4daafcd req-99005eb1-990d-4d34-8cb3-fe44c33e32e9 service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Updating instance_info_cache with network_info: [{"id": "7afec2db-71a8-42c9-aabb-988b70a71ede", "address": "fa:16:3e:49:e6:3e", "network": {"id": "c5f8653d-b207-4631-8f20-252c17df44b3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2063892400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a056927390f4ff2a0305bd7e8ad5f3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afec2db-71", "ovs_interfaceid": "7afec2db-71a8-42c9-aabb-988b70a71ede", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.649552] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 4e6b1296-9e19-4047-9c38-dc94c686d0cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1298.807950] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697166, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.145709} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.808297] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1298.809421] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ba6789-6450-481e-af8c-7dea0e96f81e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.832416] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] 581f9d48-dcb8-4a34-928b-64087a9f966b/581f9d48-dcb8-4a34-928b-64087a9f966b.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1298.832767] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cf0fc35-1cfe-4237-b33d-d6c6f02319fd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.857615] env[63297]: DEBUG oslo_vmware.api [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Task: {'id': task-1697167, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188128} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.858869] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1298.859075] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1298.859287] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1298.859456] env[63297]: INFO nova.compute.manager [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Took 1.16 seconds to destroy the instance on the hypervisor. 
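The records above trace a complete teardown: PowerOffVM_Task is invoked, polled to completion, the VM is unregistered, and DeleteDatastoreFile_Task removes its files, with "progress is N%" lines emitted on each poll. As a minimal illustration of that poll-until-done pattern (a stand-alone sketch with hypothetical FakeTask/wait_for_task names, not the real oslo.vmware API), consider:

    # Illustrative sketch only -- not the oslo.vmware implementation.
    # Models the "Task: {...} progress is N%" / "completed successfully"
    # pattern seen in the log: submit a task handle, then poll it until it
    # reaches a terminal state, logging progress on every poll interval.
    import logging
    import time
    from dataclasses import dataclass

    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
    LOG = logging.getLogger("poll_sketch")


    @dataclass
    class FakeTask:                  # hypothetical stand-in for a vCenter task
        task_id: str
        name: str
        progress: int = 0

        def refresh(self) -> None:
            """Pretend the backend made progress since the last poll."""
            self.progress = min(100, self.progress + 40)

        @property
        def done(self) -> bool:
            return self.progress >= 100


    def wait_for_task(task: FakeTask, poll_interval: float = 0.5) -> None:
        """Poll a task until completion, mirroring the log's _poll_task lines."""
        start = time.monotonic()
        while True:
            task.refresh()
            if task.done:
                LOG.debug("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} "
                          "completed successfully.",
                          task.task_id, task.name, time.monotonic() - start)
                return
            LOG.debug("Task: {'id': %s, 'name': %s} progress is %d%%.",
                      task.task_id, task.name, task.progress)
            time.sleep(poll_interval)


    if __name__ == "__main__":
        wait_for_task(FakeTask("task-1697163", "PowerOffVM_Task"))

The real driver submits each VirtualMachine.*_Task call through oslo_vmware.service and blocks in wait_for_task exactly as the surrounding entries show; the sketch only reproduces the polling shape, not the SOAP plumbing.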
[ 1298.859691] env[63297]: DEBUG oslo.service.loopingcall [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1298.859974] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Waiting for the task: (returnval){ [ 1298.859974] env[63297]: value = "task-1697170" [ 1298.859974] env[63297]: _type = "Task" [ 1298.859974] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.860164] env[63297]: DEBUG nova.compute.manager [-] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1298.860254] env[63297]: DEBUG nova.network.neutron [-] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1298.876814] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697170, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.877798] env[63297]: DEBUG oslo_vmware.api [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697168, 'name': PowerOnVM_Task, 'duration_secs': 0.495189} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.877798] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1298.877798] env[63297]: INFO nova.compute.manager [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Took 9.18 seconds to spawn the instance on the hypervisor. 
[ 1298.877798] env[63297]: DEBUG nova.compute.manager [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1298.878575] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9662b1a3-f997-4590-b16d-4ff822dce656 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.916825] env[63297]: DEBUG oslo_concurrency.lockutils [req-3e99eb17-fb73-4156-b0ff-4f55f4daafcd req-99005eb1-990d-4d34-8cb3-fe44c33e32e9 service nova] Releasing lock "refresh_cache-862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1299.155065] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 5124f7fb-1293-4964-98c4-426ecfce7d10 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1299.155438] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1299.155590] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1299.158408] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Acquiring lock "41b1ce5d-a8ac-4b93-94a3-cf26367266d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.158644] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Lock "41b1ce5d-a8ac-4b93-94a3-cf26367266d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.377427] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697170, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.384417] env[63297]: DEBUG nova.compute.manager [req-28183310-b8d0-43e6-aaea-38e894208333 req-d0e51ace-3e3d-4999-8986-b909c924072f service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Received event network-vif-deleted-7afec2db-71a8-42c9-aabb-988b70a71ede {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1299.384625] env[63297]: INFO nova.compute.manager [req-28183310-b8d0-43e6-aaea-38e894208333 req-d0e51ace-3e3d-4999-8986-b909c924072f service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Neutron deleted interface 7afec2db-71a8-42c9-aabb-988b70a71ede; detaching it from the instance and deleting it from the info cache [ 1299.384788] env[63297]: DEBUG nova.network.neutron [req-28183310-b8d0-43e6-aaea-38e894208333 req-d0e51ace-3e3d-4999-8986-b909c924072f service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.399626] env[63297]: INFO nova.compute.manager [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Took 45.17 seconds to build instance. [ 1299.671382] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "eff06e8a-8341-4d5e-b6dd-a585be4a21ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.671382] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "eff06e8a-8341-4d5e-b6dd-a585be4a21ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.671382] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "eff06e8a-8341-4d5e-b6dd-a585be4a21ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.671567] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "eff06e8a-8341-4d5e-b6dd-a585be4a21ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.671786] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "eff06e8a-8341-4d5e-b6dd-a585be4a21ea-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.676043] env[63297]: INFO nova.compute.manager [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Terminating instance [ 1299.678893] env[63297]: DEBUG nova.compute.manager [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1299.679102] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1299.680031] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f332a3-2ced-4c4a-b651-a2aad3f42aeb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.687737] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1299.690300] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87fc8a0f-d809-47a4-ad95-ff65ef89aae8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.702540] env[63297]: DEBUG oslo_vmware.api [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1299.702540] env[63297]: value = "task-1697171" [ 1299.702540] env[63297]: _type = "Task" [ 1299.702540] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.708983] env[63297]: DEBUG nova.network.neutron [-] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.718808] env[63297]: DEBUG oslo_vmware.api [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697171, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.753058] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90a7c7f-8ca8-4543-b556-dffcd46ce738 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.763350] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d45d4a6-5986-453a-b6b7-98dbf358f030 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.795226] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fc4633-538e-4e24-a259-1568a558d478 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.803160] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2b1834-ee2b-48fe-ab6d-5dbd86d3887b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.817122] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.872929] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697170, 'name': ReconfigVM_Task, 'duration_secs': 0.628307} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.873612] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Reconfigured VM instance instance-00000022 to attach disk [datastore1] 581f9d48-dcb8-4a34-928b-64087a9f966b/581f9d48-dcb8-4a34-928b-64087a9f966b.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1299.873855] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-296b8cb4-8004-4733-87e3-1277bc4c576a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.880361] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Waiting for the task: (returnval){ [ 1299.880361] env[63297]: value = "task-1697172" [ 1299.880361] env[63297]: _type = "Task" [ 1299.880361] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.888749] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697172, 'name': Rename_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.888977] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99d655d8-ec8f-4a86-a5d0-7b1acae8e564 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.896876] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ff033c-ae30-45cc-b6d7-28148c45089b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.907487] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf3bbe08-3e2f-40ab-a7a2-cd6477c455ce tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.090s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.933583] env[63297]: DEBUG nova.compute.manager [req-28183310-b8d0-43e6-aaea-38e894208333 req-d0e51ace-3e3d-4999-8986-b909c924072f service nova] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Detach interface failed, port_id=7afec2db-71a8-42c9-aabb-988b70a71ede, reason: Instance 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1300.198119] env[63297]: DEBUG nova.compute.manager [req-3b260dfa-c93c-4b14-9759-a9424a7ed7fd req-57751330-4891-4ce5-98ed-947121389293 service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Received event network-changed-23a6c287-76ba-4a4d-9cfa-cd71492215be {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1300.198422] env[63297]: DEBUG nova.compute.manager [req-3b260dfa-c93c-4b14-9759-a9424a7ed7fd req-57751330-4891-4ce5-98ed-947121389293 service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Refreshing instance network info cache due to event network-changed-23a6c287-76ba-4a4d-9cfa-cd71492215be. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1300.198454] env[63297]: DEBUG oslo_concurrency.lockutils [req-3b260dfa-c93c-4b14-9759-a9424a7ed7fd req-57751330-4891-4ce5-98ed-947121389293 service nova] Acquiring lock "refresh_cache-b853b581-ea46-4455-8cdb-6ea2f31c22be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1300.198571] env[63297]: DEBUG oslo_concurrency.lockutils [req-3b260dfa-c93c-4b14-9759-a9424a7ed7fd req-57751330-4891-4ce5-98ed-947121389293 service nova] Acquired lock "refresh_cache-b853b581-ea46-4455-8cdb-6ea2f31c22be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.198726] env[63297]: DEBUG nova.network.neutron [req-3b260dfa-c93c-4b14-9759-a9424a7ed7fd req-57751330-4891-4ce5-98ed-947121389293 service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Refreshing network info cache for port 23a6c287-76ba-4a4d-9cfa-cd71492215be {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1300.213073] env[63297]: INFO nova.compute.manager [-] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Took 1.35 seconds to deallocate network for instance. 
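The entries around this point also show the named-lock bookkeeping that oslo_concurrency.lockutils logs: "Acquiring lock ... by ...", "acquired ... waited 0.000s", and later "released ... held N.NNNs" around the refresh_cache work. A simplified stand-in for that pattern (hypothetical named_lock helper, assumed behaviour, not the lockutils source) could look like:

    # Illustrative sketch only -- a simplified stand-in for the
    # oslo_concurrency.lockutils pattern visible above: log how long a caller
    # waited to acquire a named lock and how long it held it before release.
    import logging
    import threading
    import time
    from collections import defaultdict
    from contextlib import contextmanager

    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
    LOG = logging.getLogger("lock_sketch")

    _locks: dict[str, threading.Lock] = defaultdict(threading.Lock)


    @contextmanager
    def named_lock(name: str, caller: str):
        """Acquire the lock registered under `name`, logging wait/hold times."""
        LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
        t0 = time.monotonic()
        with _locks[name]:
            LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                      name, caller, time.monotonic() - t0)
            t1 = time.monotonic()
            try:
                yield
            finally:
                LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                          name, caller, time.monotonic() - t1)


    if __name__ == "__main__":
        with named_lock("refresh_cache-b853b581-ea46-4455-8cdb-6ea2f31c22be",
                        "refresh_network_info"):
            time.sleep(0.05)  # stand-in for refreshing the instance network cache

This mirrors why the log pairs every "Acquired lock" line with a later "released ... held" line: the cache refresh runs entirely inside the guarded region, and the held time is measured from acquisition to release.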
[ 1300.213537] env[63297]: DEBUG oslo_vmware.api [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697171, 'name': PowerOffVM_Task, 'duration_secs': 0.206937} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.215040] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1300.215227] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1300.218232] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0c58063-8702-4623-a15e-d2af5fb35b9d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.305727] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1300.305983] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1300.306143] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Deleting the datastore file [datastore1] eff06e8a-8341-4d5e-b6dd-a585be4a21ea {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1300.306543] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65f054ff-b417-45ac-bab1-1ab3c9120909 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.314465] env[63297]: DEBUG oslo_vmware.api [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1300.314465] env[63297]: value = "task-1697174" [ 1300.314465] env[63297]: _type = "Task" [ 1300.314465] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.319783] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1300.326840] env[63297]: DEBUG oslo_vmware.api [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697174, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.390099] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697172, 'name': Rename_Task, 'duration_secs': 0.150086} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.390373] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1300.390589] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6656b39c-759c-4a31-95ba-c919695abeab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.396965] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Waiting for the task: (returnval){ [ 1300.396965] env[63297]: value = "task-1697175" [ 1300.396965] env[63297]: _type = "Task" [ 1300.396965] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.405666] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697175, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.410159] env[63297]: DEBUG nova.compute.manager [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1300.721232] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.825809] env[63297]: DEBUG oslo_vmware.api [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697174, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.330336} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.826136] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1300.826360] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1300.826560] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1300.826853] env[63297]: INFO nova.compute.manager [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1300.827065] env[63297]: DEBUG oslo.service.loopingcall [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1300.827292] env[63297]: DEBUG nova.compute.manager [-] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1300.827410] env[63297]: DEBUG nova.network.neutron [-] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1300.829376] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1300.829635] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.286s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.829892] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 37.019s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.908951] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697175, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.952750] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.119248] env[63297]: DEBUG nova.network.neutron [req-3b260dfa-c93c-4b14-9759-a9424a7ed7fd req-57751330-4891-4ce5-98ed-947121389293 service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Updated VIF entry in instance network info cache for port 23a6c287-76ba-4a4d-9cfa-cd71492215be. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1301.119806] env[63297]: DEBUG nova.network.neutron [req-3b260dfa-c93c-4b14-9759-a9424a7ed7fd req-57751330-4891-4ce5-98ed-947121389293 service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Updating instance_info_cache with network_info: [{"id": "23a6c287-76ba-4a4d-9cfa-cd71492215be", "address": "fa:16:3e:57:5f:ca", "network": {"id": "43d5b50a-9475-40f0-ac20-45a93157aa57", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-64163963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.235", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2deadaf96df7430aba8594c7f98facd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23a6c287-76", "ovs_interfaceid": "23a6c287-76ba-4a4d-9cfa-cd71492215be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.413117] env[63297]: DEBUG oslo_vmware.api [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697175, 'name': PowerOnVM_Task, 'duration_secs': 0.616615} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.414575] env[63297]: DEBUG nova.compute.manager [req-1eeb0a60-59de-444c-82df-0475f6f07618 req-9309809f-e4b5-47d7-9f5d-4344173b661b service nova] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Received event network-vif-deleted-ca5923c7-8c5e-4d13-8abd-34373c2d8dd5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1301.414575] env[63297]: INFO nova.compute.manager [req-1eeb0a60-59de-444c-82df-0475f6f07618 req-9309809f-e4b5-47d7-9f5d-4344173b661b service nova] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Neutron deleted interface ca5923c7-8c5e-4d13-8abd-34373c2d8dd5; detaching it from the instance and deleting it from the info cache [ 1301.414724] env[63297]: DEBUG nova.network.neutron [req-1eeb0a60-59de-444c-82df-0475f6f07618 req-9309809f-e4b5-47d7-9f5d-4344173b661b service nova] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.415700] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1301.415948] env[63297]: INFO nova.compute.manager [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Took 9.10 seconds to spawn the instance on the hypervisor. 
[ 1301.416605] env[63297]: DEBUG nova.compute.manager [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1301.417894] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2f82da-86f0-4efe-9edc-a82316390229 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.624354] env[63297]: DEBUG oslo_concurrency.lockutils [req-3b260dfa-c93c-4b14-9759-a9424a7ed7fd req-57751330-4891-4ce5-98ed-947121389293 service nova] Releasing lock "refresh_cache-b853b581-ea46-4455-8cdb-6ea2f31c22be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.686858] env[63297]: DEBUG nova.network.neutron [-] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.797070] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5876c9f3-ddc3-4e8c-b6d7-aa086991006a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.804775] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4953219-b300-4bf1-b4b0-72d027e006b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.834095] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1301.834616] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1301.834806] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1301.836690] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5fc1f0-ea25-40e9-b605-22ee5693fb5a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.844183] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24fa36a3-81be-4c70-ab58-d18ae79422ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.860489] env[63297]: DEBUG nova.compute.provider_tree [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.918957] env[63297]: DEBUG oslo_vmware.service 
[-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aba6c6b3-a9ad-43ed-b163-4d9953f940ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.928481] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61ef7ff-ef75-42e8-ae6d-c083286668f0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.947195] env[63297]: INFO nova.compute.manager [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Took 42.28 seconds to build instance. [ 1301.967854] env[63297]: DEBUG nova.compute.manager [req-1eeb0a60-59de-444c-82df-0475f6f07618 req-9309809f-e4b5-47d7-9f5d-4344173b661b service nova] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Detach interface failed, port_id=ca5923c7-8c5e-4d13-8abd-34373c2d8dd5, reason: Instance eff06e8a-8341-4d5e-b6dd-a585be4a21ea could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1302.192240] env[63297]: INFO nova.compute.manager [-] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Took 1.36 seconds to deallocate network for instance. [ 1302.362366] env[63297]: DEBUG nova.scheduler.client.report [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1302.449109] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7e5d2c5-db70-4586-94e3-5849c079b8fe tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lock "581f9d48-dcb8-4a34-928b-64087a9f966b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.539s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.700127] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.923448] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d10f9b9-0422-424f-af83-1644c45d95e8 tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquiring lock "interface-581f9d48-dcb8-4a34-928b-64087a9f966b-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.923718] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-2d10f9b9-0422-424f-af83-1644c45d95e8 tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lock "interface-581f9d48-dcb8-4a34-928b-64087a9f966b-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.924054] env[63297]: DEBUG nova.objects.instance [None req-2d10f9b9-0422-424f-af83-1644c45d95e8 tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lazy-loading 'flavor' on Instance uuid 581f9d48-dcb8-4a34-928b-64087a9f966b {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1302.952065] env[63297]: DEBUG nova.compute.manager [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1303.373037] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.543s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1303.375421] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.077s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1303.376861] env[63297]: INFO nova.compute.claims [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1303.428512] env[63297]: DEBUG nova.objects.instance [None req-2d10f9b9-0422-424f-af83-1644c45d95e8 tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lazy-loading 'pci_requests' on Instance uuid 581f9d48-dcb8-4a34-928b-64087a9f966b {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1303.469816] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.868546] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "refresh_cache-81920a24-f406-4923-98b7-cc0f3d0ccc8b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1303.868697] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquired lock 
"refresh_cache-81920a24-f406-4923-98b7-cc0f3d0ccc8b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.868840] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Forcefully refreshing network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1303.931145] env[63297]: DEBUG nova.objects.base [None req-2d10f9b9-0422-424f-af83-1644c45d95e8 tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Object Instance<581f9d48-dcb8-4a34-928b-64087a9f966b> lazy-loaded attributes: flavor,pci_requests {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1303.931383] env[63297]: DEBUG nova.network.neutron [None req-2d10f9b9-0422-424f-af83-1644c45d95e8 tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1303.944882] env[63297]: INFO nova.scheduler.client.report [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Deleted allocation for migration 4720dc5e-30b9-4f95-8509-ba35c8eb57ef [ 1304.045604] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d10f9b9-0422-424f-af83-1644c45d95e8 tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lock "interface-581f9d48-dcb8-4a34-928b-64087a9f966b-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.122s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.388802] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1304.452272] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09299a6c-926a-4cd2-a42f-fd6edd25eb2f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "9b1306f9-4b0a-4116-8e79-271478f33490" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 44.370s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.814815] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f8ad10-1ebb-42fd-80df-08880202e51b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.822713] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce22abab-7d22-46fb-82b6-67666211c297 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.855514] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ab5a59-1147-4f8d-b0c6-c15670639e32 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.863725] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46740bcf-1a27-4b9a-80be-eb843b5bb569 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.877608] env[63297]: DEBUG nova.compute.provider_tree [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1304.974398] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.381383] env[63297]: DEBUG nova.scheduler.client.report [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1305.476966] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Releasing lock "refresh_cache-81920a24-f406-4923-98b7-cc0f3d0ccc8b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.477239] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Updated the network 
info_cache for instance {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1305.477410] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1305.477641] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1305.477830] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1305.478034] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1305.478266] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1305.478474] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1305.478686] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1305.478751] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1305.887254] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.511s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1305.887254] env[63297]: DEBUG nova.compute.manager [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1305.889721] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.449s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.889909] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1305.892038] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.009s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.892230] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1305.893898] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.098s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.895434] env[63297]: INFO nova.compute.claims [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1305.915320] env[63297]: INFO nova.scheduler.client.report [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Deleted allocations for instance 746742ac-8d7a-466b-8bc0-043cb5422111 [ 1305.917063] env[63297]: INFO nova.scheduler.client.report [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Deleted allocations for instance 8c10c573-de56-4c72-959a-65bf53b805a5 [ 1305.983085] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.990101] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquiring lock "581f9d48-dcb8-4a34-928b-64087a9f966b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.990365] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lock "581f9d48-dcb8-4a34-928b-64087a9f966b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.990588] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquiring lock "581f9d48-dcb8-4a34-928b-64087a9f966b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.990743] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lock "581f9d48-dcb8-4a34-928b-64087a9f966b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.990913] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lock "581f9d48-dcb8-4a34-928b-64087a9f966b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1305.992838] env[63297]: INFO nova.compute.manager [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Terminating instance [ 1305.994481] env[63297]: DEBUG nova.compute.manager [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1305.994670] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1305.995521] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e352b0-ce62-4c9a-aa8a-080e3aef5f05 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.003404] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1306.003632] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18465a1d-c948-42f9-981c-7cb5e5db7d32 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.009911] env[63297]: DEBUG oslo_vmware.api [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Waiting for the task: (returnval){ [ 1306.009911] env[63297]: value = "task-1697179" [ 1306.009911] env[63297]: _type = "Task" [ 1306.009911] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.017313] env[63297]: DEBUG oslo_vmware.api [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697179, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.400918] env[63297]: DEBUG nova.compute.utils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1306.405083] env[63297]: DEBUG nova.compute.manager [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1306.405304] env[63297]: DEBUG nova.network.neutron [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1306.422231] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "b95b7656-70ac-4eaf-9934-4b4c50e78035" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1306.422462] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "b95b7656-70ac-4eaf-9934-4b4c50e78035" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.427745] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ee43e86-5fb6-42f7-b447-984d70faf66a tempest-DeleteServersAdminTestJSON-1629689252 tempest-DeleteServersAdminTestJSON-1629689252-project-member] Lock "8c10c573-de56-4c72-959a-65bf53b805a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.017s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.435212] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94cae01a-b159-47e0-9642-744465602fb4 tempest-ImagesOneServerTestJSON-1663521757 tempest-ImagesOneServerTestJSON-1663521757-project-member] Lock "746742ac-8d7a-466b-8bc0-043cb5422111" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.918s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.450754] env[63297]: DEBUG nova.policy [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92fc0015269142758d749fda05ac19ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'baf40bf7f33349cb8bb098887d1244ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1306.523734] env[63297]: DEBUG oslo_vmware.api [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697179, 'name': PowerOffVM_Task, 'duration_secs': 0.201497} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.524208] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1306.524411] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1306.524669] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c134423-a606-433a-ae97-cc2ab41629e6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.614277] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1306.614554] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1306.614719] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Deleting the datastore file [datastore1] 581f9d48-dcb8-4a34-928b-64087a9f966b {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1306.614995] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-326674e0-3484-4f4d-8088-07197efe99e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.621672] env[63297]: DEBUG oslo_vmware.api [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Waiting for the task: (returnval){ [ 1306.621672] env[63297]: value = "task-1697181" [ 1306.621672] env[63297]: _type = "Task" [ 1306.621672] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.630059] env[63297]: DEBUG oslo_vmware.api [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697181, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.777267] env[63297]: DEBUG nova.network.neutron [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Successfully created port: 1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1306.909254] env[63297]: DEBUG nova.compute.manager [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1307.131334] env[63297]: DEBUG oslo_vmware.api [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Task: {'id': task-1697181, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23616} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.133774] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1307.134205] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1307.134499] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1307.134795] env[63297]: INFO nova.compute.manager [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1307.137068] env[63297]: DEBUG oslo.service.loopingcall [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1307.137068] env[63297]: DEBUG nova.compute.manager [-] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1307.137068] env[63297]: DEBUG nova.network.neutron [-] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1307.340453] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f341d70-622b-49f9-831c-cbbf57874ef7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.348898] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94486ca2-e242-4529-a8d6-97699b49d56c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.383184] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f649688c-e78a-43f2-aca1-b758cb3c9eb2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.391665] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a519733-2a4e-41a9-903f-26b376d729c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.406815] env[63297]: DEBUG nova.compute.provider_tree [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1307.654226] env[63297]: DEBUG nova.compute.manager [req-28a4efe6-cdac-4798-b2c6-3d21b46ebae5 req-0f61bd59-1a52-4fac-b010-bb8a1a6f9801 service nova] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Received event network-vif-deleted-5c09f690-4a73-47ed-860e-be77651b0206 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1307.654473] env[63297]: INFO nova.compute.manager [req-28a4efe6-cdac-4798-b2c6-3d21b46ebae5 req-0f61bd59-1a52-4fac-b010-bb8a1a6f9801 service nova] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Neutron deleted interface 5c09f690-4a73-47ed-860e-be77651b0206; detaching it from the instance and deleting it from the info cache [ 1307.654614] env[63297]: DEBUG nova.network.neutron [req-28a4efe6-cdac-4798-b2c6-3d21b46ebae5 req-0f61bd59-1a52-4fac-b010-bb8a1a6f9801 service nova] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.908909] env[63297]: DEBUG nova.scheduler.client.report [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1307.918779] env[63297]: DEBUG nova.compute.manager [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1307.947167] env[63297]: DEBUG nova.virt.hardware [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1307.947433] env[63297]: DEBUG nova.virt.hardware [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1307.947595] env[63297]: DEBUG nova.virt.hardware [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1307.947776] env[63297]: DEBUG nova.virt.hardware [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1307.947926] env[63297]: DEBUG nova.virt.hardware [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1307.948311] env[63297]: DEBUG nova.virt.hardware [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1307.948605] env[63297]: DEBUG nova.virt.hardware [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1307.948818] env[63297]: DEBUG nova.virt.hardware [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1307.949051] env[63297]: DEBUG nova.virt.hardware [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1307.949264] env[63297]: DEBUG nova.virt.hardware [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1307.949478] env[63297]: DEBUG nova.virt.hardware [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1307.950596] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21bcfe18-5ec9-4b6e-88f0-a59028327ab8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.960795] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb39c83-c6ef-4856-8c33-a787804f619f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.129582] env[63297]: DEBUG nova.network.neutron [-] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.158055] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db8a5207-4a4a-49d2-966e-ff4406dccd1e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.169520] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cafde6-6e43-4264-bb64-b0f5bdd2642f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.205548] env[63297]: DEBUG nova.compute.manager [req-28a4efe6-cdac-4798-b2c6-3d21b46ebae5 req-0f61bd59-1a52-4fac-b010-bb8a1a6f9801 service nova] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Detach interface failed, port_id=5c09f690-4a73-47ed-860e-be77651b0206, reason: Instance 581f9d48-dcb8-4a34-928b-64087a9f966b could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1308.415514] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.521s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1308.416246] env[63297]: DEBUG nova.compute.manager [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1308.420245] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.999s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.423500] env[63297]: INFO nova.compute.claims [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1308.632566] env[63297]: INFO nova.compute.manager [-] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Took 1.50 seconds to deallocate network for instance. [ 1308.931091] env[63297]: DEBUG nova.compute.utils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1308.936860] env[63297]: DEBUG nova.compute.manager [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1308.937488] env[63297]: DEBUG nova.network.neutron [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1309.031168] env[63297]: DEBUG nova.policy [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ce4e0757c584ebdb556c79d3c0bd990', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2786fb3158214107a458dc08735ebeb1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1309.036730] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Acquiring lock "a57c0638-e14b-4474-a6b4-7184d7e2a0fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1309.037111] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lock "a57c0638-e14b-4474-a6b4-7184d7e2a0fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1309.119309] env[63297]: DEBUG nova.network.neutron [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Successfully updated port: 1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1309.139674] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1309.436087] env[63297]: DEBUG nova.compute.manager [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1309.558118] env[63297]: DEBUG nova.network.neutron [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Successfully created port: 468613e2-02e8-4bf5-9887-fc0f90ff2f75 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1309.620424] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1309.620790] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.621034] env[63297]: DEBUG nova.network.neutron [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1309.701227] env[63297]: DEBUG nova.compute.manager [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Received event network-vif-plugged-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1309.701454] env[63297]: DEBUG oslo_concurrency.lockutils [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] Acquiring lock "fba9040d-f904-44a1-8785-14d4696ea939-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1309.701661] env[63297]: DEBUG oslo_concurrency.lockutils [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] Lock "fba9040d-f904-44a1-8785-14d4696ea939-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1309.701947] env[63297]: DEBUG oslo_concurrency.lockutils [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] Lock "fba9040d-f904-44a1-8785-14d4696ea939-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1309.702332] env[63297]: DEBUG nova.compute.manager [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] No waiting events found dispatching network-vif-plugged-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1309.702427] env[63297]: WARNING nova.compute.manager [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Received unexpected event network-vif-plugged-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 for instance with vm_state building and task_state spawning. [ 1309.702582] env[63297]: DEBUG nova.compute.manager [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Received event network-changed-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1309.702739] env[63297]: DEBUG nova.compute.manager [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Refreshing instance network info cache due to event network-changed-1dd69c1e-7eee-4b1b-b4a7-421ab5477495. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1309.702907] env[63297]: DEBUG oslo_concurrency.lockutils [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] Acquiring lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.004459] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec42fdd-c2b3-4efb-98e4-54047d3a51bc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.014411] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237e26bb-aca5-402e-a338-9191f5151bc2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.047965] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bec9fa9-0ea9-402c-a4e5-5b6056c2f2db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.057797] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13f2c20-4840-4db9-9e0d-7910ddff0d98 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.073540] env[63297]: DEBUG nova.compute.provider_tree [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1310.164869] env[63297]: DEBUG nova.network.neutron [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1310.332132] env[63297]: DEBUG nova.network.neutron [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updating instance_info_cache with network_info: [{"id": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "address": "fa:16:3e:13:0a:af", "network": {"id": "77c68484-41bb-4cce-bb80-50e08cc5af11", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-795079396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf40bf7f33349cb8bb098887d1244ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd69c1e-7e", "ovs_interfaceid": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.454849] env[63297]: DEBUG nova.compute.manager [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1310.476427] env[63297]: DEBUG nova.virt.hardware [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1310.477417] env[63297]: DEBUG nova.virt.hardware [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1310.477417] env[63297]: DEBUG nova.virt.hardware [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1310.477417] env[63297]: DEBUG nova.virt.hardware [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1310.477417] env[63297]: DEBUG nova.virt.hardware [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1310.477417] env[63297]: DEBUG nova.virt.hardware [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1310.477625] env[63297]: DEBUG nova.virt.hardware [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1310.477705] env[63297]: DEBUG nova.virt.hardware [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1310.477864] env[63297]: DEBUG 
nova.virt.hardware [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1310.478089] env[63297]: DEBUG nova.virt.hardware [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1310.478293] env[63297]: DEBUG nova.virt.hardware [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1310.479476] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14704be1-c78b-4931-8767-1513ca177c32 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.488039] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d925bc-4a6c-4b0c-ad14-0268dfa00daa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.578473] env[63297]: DEBUG nova.scheduler.client.report [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1310.834685] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Releasing lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.835008] env[63297]: DEBUG nova.compute.manager [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Instance network_info: |[{"id": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "address": "fa:16:3e:13:0a:af", "network": {"id": "77c68484-41bb-4cce-bb80-50e08cc5af11", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-795079396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "baf40bf7f33349cb8bb098887d1244ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd69c1e-7e", "ovs_interfaceid": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1310.835337] env[63297]: DEBUG oslo_concurrency.lockutils [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] Acquired lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.835516] env[63297]: DEBUG nova.network.neutron [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Refreshing network info cache for port 1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1310.837125] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:0a:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1dd69c1e-7eee-4b1b-b4a7-421ab5477495', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1310.845240] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Creating folder: Project (baf40bf7f33349cb8bb098887d1244ac). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1310.848312] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-999916ee-98ec-470e-8ede-f35976f4095d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.860762] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Created folder: Project (baf40bf7f33349cb8bb098887d1244ac) in parent group-v353718. [ 1310.860950] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Creating folder: Instances. Parent ref: group-v353832. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1310.861204] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7ae4c51-98dd-44b1-b2c9-f0102c5be201 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.871035] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Created folder: Instances in parent group-v353832. [ 1310.871224] env[63297]: DEBUG oslo.service.loopingcall [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1310.871410] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1310.871614] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-026becbb-5bcb-48ed-b861-4cdadc1d65f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.893100] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1310.893100] env[63297]: value = "task-1697184" [ 1310.893100] env[63297]: _type = "Task" [ 1310.893100] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.901656] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697184, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.090745] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.670s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.091573] env[63297]: DEBUG nova.compute.manager [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1311.096882] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.545s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.100735] env[63297]: INFO nova.compute.claims [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1311.140036] env[63297]: DEBUG nova.network.neutron [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updated VIF entry in instance network info cache for port 1dd69c1e-7eee-4b1b-b4a7-421ab5477495. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1311.142653] env[63297]: DEBUG nova.network.neutron [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updating instance_info_cache with network_info: [{"id": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "address": "fa:16:3e:13:0a:af", "network": {"id": "77c68484-41bb-4cce-bb80-50e08cc5af11", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-795079396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf40bf7f33349cb8bb098887d1244ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd69c1e-7e", "ovs_interfaceid": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.255928] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "c4e96403-895c-479d-bfb2-274a87446bf9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.256554] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "c4e96403-895c-479d-bfb2-274a87446bf9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.404454] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697184, 'name': CreateVM_Task, 'duration_secs': 0.427582} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.405121] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1311.405718] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1311.405794] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.406547] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1311.407623] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5d7d25f-0767-4cee-9f7a-3ef3a477da1f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.412282] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1311.412282] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b337a7-9156-043b-37d4-1a4d9c79a4d8" [ 1311.412282] env[63297]: _type = "Task" [ 1311.412282] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.419732] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b337a7-9156-043b-37d4-1a4d9c79a4d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.420566] env[63297]: DEBUG nova.network.neutron [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Successfully updated port: 468613e2-02e8-4bf5-9887-fc0f90ff2f75 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1311.612110] env[63297]: DEBUG nova.compute.utils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1311.612110] env[63297]: DEBUG nova.compute.manager [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1311.612110] env[63297]: DEBUG nova.network.neutron [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1311.646804] env[63297]: DEBUG oslo_concurrency.lockutils [req-4d189cc4-c205-47e8-86f3-0f1fb876e12e req-146eca5e-c7c8-4497-9f4b-a88ba17bcfdd service nova] Releasing lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.658762] env[63297]: DEBUG nova.policy [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd720f83f538474f826244d86c2bffdf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3acd68ee64c460ea0c42e9303457a08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1311.765858] env[63297]: DEBUG nova.compute.manager [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Received event network-vif-plugged-468613e2-02e8-4bf5-9887-fc0f90ff2f75 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1311.766568] env[63297]: DEBUG oslo_concurrency.lockutils [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] Acquiring lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.768900] env[63297]: DEBUG oslo_concurrency.lockutils [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service 
nova] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.768900] env[63297]: DEBUG oslo_concurrency.lockutils [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.768900] env[63297]: DEBUG nova.compute.manager [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] No waiting events found dispatching network-vif-plugged-468613e2-02e8-4bf5-9887-fc0f90ff2f75 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1311.768900] env[63297]: WARNING nova.compute.manager [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Received unexpected event network-vif-plugged-468613e2-02e8-4bf5-9887-fc0f90ff2f75 for instance with vm_state building and task_state spawning. [ 1311.768900] env[63297]: DEBUG nova.compute.manager [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Received event network-changed-468613e2-02e8-4bf5-9887-fc0f90ff2f75 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1311.768900] env[63297]: DEBUG nova.compute.manager [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Refreshing instance network info cache due to event network-changed-468613e2-02e8-4bf5-9887-fc0f90ff2f75. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1311.768900] env[63297]: DEBUG oslo_concurrency.lockutils [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] Acquiring lock "refresh_cache-1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1311.768900] env[63297]: DEBUG oslo_concurrency.lockutils [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] Acquired lock "refresh_cache-1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.768900] env[63297]: DEBUG nova.network.neutron [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Refreshing network info cache for port 468613e2-02e8-4bf5-9887-fc0f90ff2f75 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1311.923337] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "refresh_cache-1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1311.923885] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b337a7-9156-043b-37d4-1a4d9c79a4d8, 'name': SearchDatastore_Task, 'duration_secs': 0.013542} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.924491] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.926017] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1311.926017] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1311.926017] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.926017] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1311.926017] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74ec528c-a2d0-4356-93e2-9c596541adfa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.938063] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1311.938517] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1311.939393] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-056e64c0-03a1-4cf3-9bb4-018d19995b17 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.947021] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1311.947021] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d8ddbc-0b51-bf8d-9398-bb1233697b58" [ 1311.947021] env[63297]: _type = "Task" [ 1311.947021] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.954676] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d8ddbc-0b51-bf8d-9398-bb1233697b58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.984833] env[63297]: DEBUG nova.network.neutron [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Successfully created port: ba10706f-1f6c-457c-8e5c-f18207b8577a {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1312.114386] env[63297]: DEBUG nova.compute.manager [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1312.316317] env[63297]: DEBUG nova.network.neutron [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1312.409835] env[63297]: DEBUG nova.network.neutron [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.457422] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d8ddbc-0b51-bf8d-9398-bb1233697b58, 'name': SearchDatastore_Task, 'duration_secs': 0.010608} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.460966] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b56cd238-5c78-43c4-bf7f-3aeb40db9a78 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.467528] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1312.467528] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cd36db-3e84-a2ef-33b3-813497c6bf74" [ 1312.467528] env[63297]: _type = "Task" [ 1312.467528] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.477363] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cd36db-3e84-a2ef-33b3-813497c6bf74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.583788] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aaf84ce-ab1b-4219-a453-4b42767daa0b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.591138] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07d0d39-23ff-466f-aeed-a59e6e82105a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.623920] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2e03df-3cbe-4c94-a907-c8d4cb4d2298 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.631636] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a1a30d-99be-4518-976d-d1b4e1a2c1dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.644970] env[63297]: DEBUG nova.compute.provider_tree [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1312.915064] env[63297]: DEBUG oslo_concurrency.lockutils [req-d140dee6-02d6-4fa5-a27c-4f4fc1b1603d req-ec29a3a9-786f-4d4c-8884-25816a3327f5 service nova] Releasing lock "refresh_cache-1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.915474] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired lock "refresh_cache-1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.915638] 
env[63297]: DEBUG nova.network.neutron [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1312.981120] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cd36db-3e84-a2ef-33b3-813497c6bf74, 'name': SearchDatastore_Task, 'duration_secs': 0.014576} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.981548] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.981915] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] fba9040d-f904-44a1-8785-14d4696ea939/fba9040d-f904-44a1-8785-14d4696ea939.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1312.982304] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d1620e1-3c04-4074-9665-76e9f02b3205 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.991544] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1312.991544] env[63297]: value = "task-1697185" [ 1312.991544] env[63297]: _type = "Task" [ 1312.991544] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.005809] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697185, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.127980] env[63297]: DEBUG nova.compute.manager [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1313.148439] env[63297]: DEBUG nova.scheduler.client.report [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1313.160556] env[63297]: DEBUG nova.virt.hardware [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1313.160850] env[63297]: DEBUG nova.virt.hardware [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1313.161049] env[63297]: DEBUG nova.virt.hardware [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1313.161287] env[63297]: DEBUG nova.virt.hardware [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1313.161479] env[63297]: DEBUG nova.virt.hardware [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1313.161663] env[63297]: DEBUG nova.virt.hardware [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1313.161915] env[63297]: DEBUG nova.virt.hardware [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1313.162137] env[63297]: DEBUG nova.virt.hardware [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1313.162348] env[63297]: DEBUG nova.virt.hardware [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1313.162544] env[63297]: DEBUG nova.virt.hardware [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1313.162764] env[63297]: DEBUG nova.virt.hardware [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1313.164034] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865bafb7-597d-468d-a1cd-81a4560bc97f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.173443] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38fb2ca-b204-46cc-8ced-c6b1f5ea9689 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.480306] env[63297]: DEBUG nova.network.neutron [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1313.505509] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697185, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.655905] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.656533] env[63297]: DEBUG nova.compute.manager [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1313.659909] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.486s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1313.662255] env[63297]: INFO nova.compute.claims [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1313.799923] env[63297]: DEBUG nova.compute.manager [req-08e11f9d-a4cb-498a-a7f7-1cb248b57145 req-06044097-c6cb-4614-a736-c1a8a4e3bde1 service nova] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Received event network-vif-plugged-ba10706f-1f6c-457c-8e5c-f18207b8577a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1313.800229] env[63297]: DEBUG oslo_concurrency.lockutils [req-08e11f9d-a4cb-498a-a7f7-1cb248b57145 req-06044097-c6cb-4614-a736-c1a8a4e3bde1 service nova] Acquiring lock "8bc4bb67-bc00-44c6-9c83-c0a1072142e6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.800437] env[63297]: DEBUG oslo_concurrency.lockutils [req-08e11f9d-a4cb-498a-a7f7-1cb248b57145 req-06044097-c6cb-4614-a736-c1a8a4e3bde1 service nova] Lock "8bc4bb67-bc00-44c6-9c83-c0a1072142e6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1313.800667] env[63297]: DEBUG oslo_concurrency.lockutils [req-08e11f9d-a4cb-498a-a7f7-1cb248b57145 req-06044097-c6cb-4614-a736-c1a8a4e3bde1 service nova] Lock "8bc4bb67-bc00-44c6-9c83-c0a1072142e6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.800789] env[63297]: DEBUG nova.compute.manager [req-08e11f9d-a4cb-498a-a7f7-1cb248b57145 req-06044097-c6cb-4614-a736-c1a8a4e3bde1 service nova] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] No waiting events found dispatching network-vif-plugged-ba10706f-1f6c-457c-8e5c-f18207b8577a {{(pid=63297) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1313.800924] env[63297]: WARNING nova.compute.manager [req-08e11f9d-a4cb-498a-a7f7-1cb248b57145 req-06044097-c6cb-4614-a736-c1a8a4e3bde1 service nova] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Received unexpected event network-vif-plugged-ba10706f-1f6c-457c-8e5c-f18207b8577a for instance with vm_state building and task_state spawning. [ 1313.813167] env[63297]: DEBUG nova.network.neutron [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Updating instance_info_cache with network_info: [{"id": "468613e2-02e8-4bf5-9887-fc0f90ff2f75", "address": "fa:16:3e:bb:c6:4d", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap468613e2-02", "ovs_interfaceid": "468613e2-02e8-4bf5-9887-fc0f90ff2f75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.917852] env[63297]: DEBUG nova.network.neutron [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Successfully updated port: ba10706f-1f6c-457c-8e5c-f18207b8577a {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1314.002997] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697185, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.904715} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.003233] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] fba9040d-f904-44a1-8785-14d4696ea939/fba9040d-f904-44a1-8785-14d4696ea939.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1314.003442] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1314.003699] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f1da3ac-a300-4cf3-b574-714a38bb57e6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.011433] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1314.011433] env[63297]: value = "task-1697186" [ 1314.011433] env[63297]: _type = "Task" [ 1314.011433] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.019201] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697186, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.171944] env[63297]: DEBUG nova.compute.utils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1314.176259] env[63297]: DEBUG nova.compute.manager [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1314.176363] env[63297]: DEBUG nova.network.neutron [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1314.218895] env[63297]: DEBUG nova.policy [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13c8359a7e444cb685dddde06efa122c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'babfd205ed454924b0bceb1d03fcfdf2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1314.316133] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Releasing lock "refresh_cache-1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.316483] env[63297]: DEBUG nova.compute.manager [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Instance network_info: |[{"id": "468613e2-02e8-4bf5-9887-fc0f90ff2f75", "address": "fa:16:3e:bb:c6:4d", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap468613e2-02", "ovs_interfaceid": "468613e2-02e8-4bf5-9887-fc0f90ff2f75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1314.316927] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:c6:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '468613e2-02e8-4bf5-9887-fc0f90ff2f75', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1314.324759] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Creating folder: Project (2786fb3158214107a458dc08735ebeb1). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1314.325638] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ec51e8f-ba71-45da-8cb1-833c13f45538 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.335677] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Created folder: Project (2786fb3158214107a458dc08735ebeb1) in parent group-v353718. [ 1314.335857] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Creating folder: Instances. Parent ref: group-v353835. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1314.336086] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48e1da73-6327-488c-aa23-8a05317e0b3e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.345392] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Created folder: Instances in parent group-v353835. [ 1314.345626] env[63297]: DEBUG oslo.service.loopingcall [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1314.345806] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1314.345994] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e189d56-5130-4099-998f-bff402f87a89 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.364907] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1314.364907] env[63297]: value = "task-1697189" [ 1314.364907] env[63297]: _type = "Task" [ 1314.364907] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.374516] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697189, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.421916] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Acquiring lock "refresh_cache-8bc4bb67-bc00-44c6-9c83-c0a1072142e6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.421916] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Acquired lock "refresh_cache-8bc4bb67-bc00-44c6-9c83-c0a1072142e6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.422064] env[63297]: DEBUG nova.network.neutron [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1314.523089] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697186, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072589} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.523562] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1314.524207] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e3d096-614d-4d04-9dc0-0484ce859a38 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.551057] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] fba9040d-f904-44a1-8785-14d4696ea939/fba9040d-f904-44a1-8785-14d4696ea939.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1314.551057] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca0e90b4-a91a-494d-ad68-aab48b247319 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.575226] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1314.575226] env[63297]: value = "task-1697190" [ 1314.575226] env[63297]: _type = "Task" [ 1314.575226] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.587841] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697190, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.588780] env[63297]: DEBUG nova.network.neutron [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Successfully created port: 614d9b29-4dd8-4ac5-bbb4-bb43593e3386 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1314.679140] env[63297]: DEBUG nova.compute.manager [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1314.875226] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697189, 'name': CreateVM_Task, 'duration_secs': 0.385308} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.876029] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1314.876248] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.876423] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.876827] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1314.877688] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c68a2ac-822b-4e85-a19c-fef4f9c61493 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.883012] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1314.883012] env[63297]: value = 
"session[525a50a1-1200-7e56-fd66-4d6251696a0b]5222f309-0087-bead-bae9-cce461ccdbce" [ 1314.883012] env[63297]: _type = "Task" [ 1314.883012] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.892268] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5222f309-0087-bead-bae9-cce461ccdbce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.947444] env[63297]: DEBUG nova.network.neutron [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Successfully created port: 6a0b5502-f624-47b8-b693-32a590e69f57 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1314.976152] env[63297]: DEBUG nova.network.neutron [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1315.093194] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697190, 'name': ReconfigVM_Task, 'duration_secs': 0.293034} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.093484] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Reconfigured VM instance instance-00000023 to attach disk [datastore1] fba9040d-f904-44a1-8785-14d4696ea939/fba9040d-f904-44a1-8785-14d4696ea939.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1315.094143] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-560d2576-3256-4022-a53f-e185da96f6f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.105288] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1315.105288] env[63297]: value = "task-1697191" [ 1315.105288] env[63297]: _type = "Task" [ 1315.105288] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.115171] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697191, 'name': Rename_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.182735] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e01d8cc-8c1c-497d-a33d-d39043febd16 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.197039] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a021ea31-9b23-4484-aa69-30a3278ec105 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.232373] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28045794-313b-4d12-9755-b17599b70bf7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.237491] env[63297]: DEBUG nova.network.neutron [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Updating instance_info_cache with network_info: [{"id": "ba10706f-1f6c-457c-8e5c-f18207b8577a", "address": "fa:16:3e:dc:e0:e2", "network": {"id": "33fc1e66-6696-4c7b-9f61-49961c762746", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-183104473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3acd68ee64c460ea0c42e9303457a08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba10706f-1f", "ovs_interfaceid": "ba10706f-1f6c-457c-8e5c-f18207b8577a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.243529] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ab1e86-6f9a-4bee-9d1b-fde7208cb981 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.259647] env[63297]: DEBUG nova.compute.provider_tree [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1315.394035] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5222f309-0087-bead-bae9-cce461ccdbce, 'name': SearchDatastore_Task, 'duration_secs': 0.014735} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.394035] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.394035] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1315.394035] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1315.394035] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.394280] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1315.394432] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9311f9b2-88c4-4d1a-89af-d29187f47031 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.402459] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1315.402627] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1315.403344] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1314b2d2-b1bb-49d8-be36-4974f439dde1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.408258] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1315.408258] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527b972c-f5f6-e4af-d630-a5f8e7caa3ed" [ 1315.408258] env[63297]: _type = "Task" [ 1315.408258] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.415484] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527b972c-f5f6-e4af-d630-a5f8e7caa3ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.615205] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697191, 'name': Rename_Task, 'duration_secs': 0.21209} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.615473] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1315.615707] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8784f99-1530-4ab7-b504-a18680b1bd4e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.621627] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1315.621627] env[63297]: value = "task-1697192" [ 1315.621627] env[63297]: _type = "Task" [ 1315.621627] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.629577] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697192, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.693975] env[63297]: DEBUG nova.compute.manager [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1315.720272] env[63297]: DEBUG nova.virt.hardware [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1315.720552] env[63297]: DEBUG nova.virt.hardware [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1315.720752] env[63297]: DEBUG nova.virt.hardware [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1315.720948] env[63297]: DEBUG nova.virt.hardware [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1315.721121] env[63297]: DEBUG nova.virt.hardware [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1315.721268] env[63297]: DEBUG nova.virt.hardware [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1315.721473] env[63297]: DEBUG nova.virt.hardware [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1315.721649] env[63297]: DEBUG nova.virt.hardware [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1315.721817] env[63297]: DEBUG nova.virt.hardware [None 
req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1315.722144] env[63297]: DEBUG nova.virt.hardware [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1315.723057] env[63297]: DEBUG nova.virt.hardware [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1315.723368] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94623055-b020-4e40-997e-125b70972c4a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.731405] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937d219d-f6d5-47fc-bdd5-323abbe0bacb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.744576] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Releasing lock "refresh_cache-8bc4bb67-bc00-44c6-9c83-c0a1072142e6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.744856] env[63297]: DEBUG nova.compute.manager [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Instance network_info: |[{"id": "ba10706f-1f6c-457c-8e5c-f18207b8577a", "address": "fa:16:3e:dc:e0:e2", "network": {"id": "33fc1e66-6696-4c7b-9f61-49961c762746", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-183104473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3acd68ee64c460ea0c42e9303457a08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba10706f-1f", "ovs_interfaceid": "ba10706f-1f6c-457c-8e5c-f18207b8577a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1315.745404] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 
tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:e0:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba10706f-1f6c-457c-8e5c-f18207b8577a', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1315.752863] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Creating folder: Project (d3acd68ee64c460ea0c42e9303457a08). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1315.753083] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-caa5650d-a4c7-41ff-baf2-3cd7022e4627 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.762042] env[63297]: DEBUG nova.scheduler.client.report [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1315.766013] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Created folder: Project (d3acd68ee64c460ea0c42e9303457a08) in parent group-v353718. [ 1315.766207] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Creating folder: Instances. Parent ref: group-v353838. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1315.766630] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7a58b49-1c9f-4f77-8b7e-28607123076d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.775428] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Created folder: Instances in parent group-v353838. [ 1315.775675] env[63297]: DEBUG oslo.service.loopingcall [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1315.775872] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1315.776071] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a938e05b-9229-46b6-9244-39918a841cc1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.797066] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1315.797066] env[63297]: value = "task-1697195" [ 1315.797066] env[63297]: _type = "Task" [ 1315.797066] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.805033] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697195, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.909018] env[63297]: DEBUG nova.compute.manager [req-d2a7b36b-ede2-46dd-ad14-7045d33bcd4d req-86d82e15-fcf5-4f5d-9f0f-14b8530c7a6c service nova] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Received event network-changed-ba10706f-1f6c-457c-8e5c-f18207b8577a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1315.909267] env[63297]: DEBUG nova.compute.manager [req-d2a7b36b-ede2-46dd-ad14-7045d33bcd4d req-86d82e15-fcf5-4f5d-9f0f-14b8530c7a6c service nova] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Refreshing instance network info cache due to event network-changed-ba10706f-1f6c-457c-8e5c-f18207b8577a. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1315.909528] env[63297]: DEBUG oslo_concurrency.lockutils [req-d2a7b36b-ede2-46dd-ad14-7045d33bcd4d req-86d82e15-fcf5-4f5d-9f0f-14b8530c7a6c service nova] Acquiring lock "refresh_cache-8bc4bb67-bc00-44c6-9c83-c0a1072142e6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1315.909703] env[63297]: DEBUG oslo_concurrency.lockutils [req-d2a7b36b-ede2-46dd-ad14-7045d33bcd4d req-86d82e15-fcf5-4f5d-9f0f-14b8530c7a6c service nova] Acquired lock "refresh_cache-8bc4bb67-bc00-44c6-9c83-c0a1072142e6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.909907] env[63297]: DEBUG nova.network.neutron [req-d2a7b36b-ede2-46dd-ad14-7045d33bcd4d req-86d82e15-fcf5-4f5d-9f0f-14b8530c7a6c service nova] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Refreshing network info cache for port ba10706f-1f6c-457c-8e5c-f18207b8577a {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1315.921106] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527b972c-f5f6-e4af-d630-a5f8e7caa3ed, 'name': SearchDatastore_Task, 'duration_secs': 0.008495} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.922387] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee62d8f0-0aef-41ce-a56a-dc9e7e4460b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.928040] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1315.928040] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5245950f-fb82-6fdc-a355-2cd43484054f" [ 1315.928040] env[63297]: _type = "Task" [ 1315.928040] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.936015] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5245950f-fb82-6fdc-a355-2cd43484054f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.130682] env[63297]: DEBUG oslo_vmware.api [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697192, 'name': PowerOnVM_Task, 'duration_secs': 0.500052} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.130955] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1316.131173] env[63297]: INFO nova.compute.manager [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Took 8.21 seconds to spawn the instance on the hypervisor. 
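The records above follow oslo.vmware's task-polling pattern: a vCenter task method (CreateVM_Task, SearchDatastore_Task, PowerOnVM_Task) is invoked through the API session, and wait_for_task / _poll_task (oslo_vmware/api.py:397, 434, 444) log progress until the task completes. A minimal sketch of that same pattern against the public oslo.vmware API follows; the connection details and the managed-object reference are placeholders for illustration, not values taken from this log.

    # Sketch: invoke a vCenter task and block until it finishes, mirroring
    # the wait_for_task / _poll_task lines seen in the capture above.
    from oslo_vmware import api as vmware_api

    # Placeholder credentials and host; not the vCenter used in this log.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'administrator', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # A VirtualMachine managed-object reference is assumed to have been
    # looked up already (e.g. via a PropertyCollector query).
    vm_ref = None  # placeholder moref

    # Start the task and poll it to a terminal state; wait_for_task emits
    # the "progress is N%" / "completed successfully" style messages.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)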
[ 1316.131371] env[63297]: DEBUG nova.compute.manager [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1316.132203] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414e530a-0d16-44fb-a9eb-9b4b09bb8979 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.268737] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.608s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.269043] env[63297]: DEBUG nova.compute.manager [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1316.271671] env[63297]: DEBUG oslo_concurrency.lockutils [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.455s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.271725] env[63297]: DEBUG oslo_concurrency.lockutils [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.273820] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.681s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.273975] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.276117] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.628s 
{{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.277102] env[63297]: INFO nova.compute.claims [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1316.305632] env[63297]: INFO nova.scheduler.client.report [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Deleted allocations for instance 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a [ 1316.307368] env[63297]: INFO nova.scheduler.client.report [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Deleted allocations for instance 87fa97a7-a8a5-4184-b52a-b02ad5468127 [ 1316.316385] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697195, 'name': CreateVM_Task, 'duration_secs': 0.416261} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.316843] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1316.317588] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1316.318489] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.318846] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1316.319615] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-732b9156-1c6b-4abc-baee-6760f1a5cef6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.325400] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Waiting for the task: (returnval){ [ 1316.325400] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5234d9f0-f3a0-8b51-9956-40e2ea52d82b" [ 1316.325400] env[63297]: _type = "Task" [ 1316.325400] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.334359] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5234d9f0-f3a0-8b51-9956-40e2ea52d82b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.438070] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5245950f-fb82-6fdc-a355-2cd43484054f, 'name': SearchDatastore_Task, 'duration_secs': 0.009461} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.438292] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1316.438545] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f/1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1316.438801] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3dc40872-6842-4ceb-9c05-b8d31deb21b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.446917] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1316.446917] env[63297]: value = "task-1697196" [ 1316.446917] env[63297]: _type = "Task" [ 1316.446917] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.456281] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697196, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.651543] env[63297]: INFO nova.compute.manager [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Took 51.37 seconds to build instance. 
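The Lock "compute_resources" and per-instance lock entries in this stretch are emitted by oslo.concurrency's lockutils wrapper, which records how long a caller waited for a named semaphore and how long it held it ("acquired by ... waited Xs", "released by ... held Xs"). A minimal sketch of the decorator usage that produces this kind of logging; the function body and lock name here are illustrative, not Nova's actual resource-tracker code.

    # Sketch: serialize access to a shared resource with a named lock;
    # oslo.concurrency logs the acquire/release lines seen above from its
    # "inner" wrapper in lockutils.py.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Critical section: one thread per process at a time.
        print('claiming resources for %s' % instance_uuid)

    claim_resources('00000000-0000-0000-0000-000000000000')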
[ 1316.781969] env[63297]: DEBUG nova.compute.utils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1316.787384] env[63297]: DEBUG nova.compute.manager [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1316.787384] env[63297]: DEBUG nova.network.neutron [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1316.823228] env[63297]: DEBUG oslo_concurrency.lockutils [None req-22bafb74-d54a-4a30-8364-b8389eecc7ce tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.179s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.824915] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7d50e39-b2c2-4c32-af8b-3543b785f9e5 tempest-ServersNegativeTestMultiTenantJSON-1018528106 tempest-ServersNegativeTestMultiTenantJSON-1018528106-project-member] Lock "87fa97a7-a8a5-4184-b52a-b02ad5468127" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.778s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.840660] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5234d9f0-f3a0-8b51-9956-40e2ea52d82b, 'name': SearchDatastore_Task, 'duration_secs': 0.009323} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.842291] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1316.843927] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1316.844232] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1316.844388] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.844574] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1316.846257] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de1e0902-0d3e-46e4-9d2a-82f5cecd813b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.849941] env[63297]: DEBUG nova.policy [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8767f029ef2847acb8679c8dda841e61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de74a055696b4dd69b88d08b52d327d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1316.867148] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
1316.867351] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1316.868122] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-828019fe-4f03-40d1-a26e-7aea3b92f176 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.874446] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Waiting for the task: (returnval){ [ 1316.874446] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524f7afe-d4b2-9e5f-9bf8-6d864a8201e6" [ 1316.874446] env[63297]: _type = "Task" [ 1316.874446] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.885588] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524f7afe-d4b2-9e5f-9bf8-6d864a8201e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.959498] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697196, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495524} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.961023] env[63297]: DEBUG nova.network.neutron [req-d2a7b36b-ede2-46dd-ad14-7045d33bcd4d req-86d82e15-fcf5-4f5d-9f0f-14b8530c7a6c service nova] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Updated VIF entry in instance network info cache for port ba10706f-1f6c-457c-8e5c-f18207b8577a. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1316.961372] env[63297]: DEBUG nova.network.neutron [req-d2a7b36b-ede2-46dd-ad14-7045d33bcd4d req-86d82e15-fcf5-4f5d-9f0f-14b8530c7a6c service nova] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Updating instance_info_cache with network_info: [{"id": "ba10706f-1f6c-457c-8e5c-f18207b8577a", "address": "fa:16:3e:dc:e0:e2", "network": {"id": "33fc1e66-6696-4c7b-9f61-49961c762746", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-183104473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3acd68ee64c460ea0c42e9303457a08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba10706f-1f", "ovs_interfaceid": "ba10706f-1f6c-457c-8e5c-f18207b8577a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.962655] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f/1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1316.965749] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1316.965749] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8e15c2b-1bf5-4566-a281-be1404b239af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.969795] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1316.969795] env[63297]: value = "task-1697197" [ 1316.969795] env[63297]: _type = "Task" [ 1316.969795] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.979434] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697197, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.083335] env[63297]: DEBUG nova.network.neutron [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Successfully updated port: 614d9b29-4dd8-4ac5-bbb4-bb43593e3386 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1317.154687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c44527fb-ed0e-4749-a424-c23e6fe629fa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "fba9040d-f904-44a1-8785-14d4696ea939" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.673s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.289893] env[63297]: DEBUG nova.compute.manager [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1317.294425] env[63297]: DEBUG nova.network.neutron [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Successfully created port: 7c20637a-f8f5-4a26-b5af-07db4b1c9991 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1317.390185] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524f7afe-d4b2-9e5f-9bf8-6d864a8201e6, 'name': SearchDatastore_Task, 'duration_secs': 0.047919} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.391277] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1146ea6c-cb7b-490a-9297-f550e5f280bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.398998] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Waiting for the task: (returnval){ [ 1317.398998] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526dc455-3945-884f-169d-1506b0d9e729" [ 1317.398998] env[63297]: _type = "Task" [ 1317.398998] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.408328] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526dc455-3945-884f-169d-1506b0d9e729, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.465235] env[63297]: DEBUG oslo_concurrency.lockutils [req-d2a7b36b-ede2-46dd-ad14-7045d33bcd4d req-86d82e15-fcf5-4f5d-9f0f-14b8530c7a6c service nova] Releasing lock "refresh_cache-8bc4bb67-bc00-44c6-9c83-c0a1072142e6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1317.479640] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697197, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08027} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.482573] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1317.483944] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7476bbb-55f6-4f2d-bc6a-753a37613f26 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.510854] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f/1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1317.515019] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fee9c16-3483-4cd8-8895-69c1d665d459 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.538021] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1317.538021] env[63297]: value = "task-1697198" [ 1317.538021] env[63297]: _type = "Task" [ 1317.538021] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.545974] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697198, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.658571] env[63297]: DEBUG nova.compute.manager [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1317.828108] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "192c3a5d-3a23-4f78-8dc7-a256b6d9381d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.828108] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "192c3a5d-3a23-4f78-8dc7-a256b6d9381d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1317.872757] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21707d6-2de5-4b85-9bd5-6bc8fa2ee8fd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.881008] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd9443b-5f9a-4e3f-9177-6db464371730 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.918918] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01767fbf-268c-4be7-8737-ba066a693831 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.929648] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526dc455-3945-884f-169d-1506b0d9e729, 'name': SearchDatastore_Task, 'duration_secs': 0.009806} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.931996] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1317.932361] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 8bc4bb67-bc00-44c6-9c83-c0a1072142e6/8bc4bb67-bc00-44c6-9c83-c0a1072142e6.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1317.932676] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61cec1ce-131f-4275-85d9-358f78ca739f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.935792] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53408eb8-1f53-40cb-8867-348060a78552 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.941376] env[63297]: DEBUG nova.compute.manager [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Received event network-vif-plugged-614d9b29-4dd8-4ac5-bbb4-bb43593e3386 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1317.941577] env[63297]: DEBUG oslo_concurrency.lockutils [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] Acquiring lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.941777] env[63297]: DEBUG oslo_concurrency.lockutils [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] Lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1317.941956] env[63297]: DEBUG oslo_concurrency.lockutils [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] Lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.942147] env[63297]: DEBUG nova.compute.manager [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] No waiting events found dispatching network-vif-plugged-614d9b29-4dd8-4ac5-bbb4-bb43593e3386 {{(pid=63297) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1317.942310] env[63297]: WARNING nova.compute.manager [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Received unexpected event network-vif-plugged-614d9b29-4dd8-4ac5-bbb4-bb43593e3386 for instance with vm_state building and task_state spawning. [ 1317.942463] env[63297]: DEBUG nova.compute.manager [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Received event network-changed-614d9b29-4dd8-4ac5-bbb4-bb43593e3386 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1317.942714] env[63297]: DEBUG nova.compute.manager [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Refreshing instance network info cache due to event network-changed-614d9b29-4dd8-4ac5-bbb4-bb43593e3386. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1317.942824] env[63297]: DEBUG oslo_concurrency.lockutils [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] Acquiring lock "refresh_cache-2d7b237e-f86d-42b1-ab04-320f0012a2d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1317.943015] env[63297]: DEBUG oslo_concurrency.lockutils [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] Acquired lock "refresh_cache-2d7b237e-f86d-42b1-ab04-320f0012a2d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.943666] env[63297]: DEBUG nova.network.neutron [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Refreshing network info cache for port 614d9b29-4dd8-4ac5-bbb4-bb43593e3386 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1318.707736] env[63297]: DEBUG nova.compute.manager [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1318.712056] env[63297]: DEBUG nova.compute.provider_tree [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.726204] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Waiting for the task: (returnval){ [ 1318.726204] env[63297]: value = "task-1697199" [ 1318.726204] env[63297]: _type = "Task" [ 1318.726204] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.727022] env[63297]: DEBUG nova.scheduler.client.report [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1318.741103] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697198, 'name': ReconfigVM_Task, 'duration_secs': 0.295726} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.742044] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f/1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1318.743016] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b23d6951-2a41-42f0-84ad-ea6ceb9e613d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.751611] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697199, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.754144] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.760499] env[63297]: DEBUG nova.virt.hardware [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1318.760726] env[63297]: DEBUG nova.virt.hardware [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1318.760874] env[63297]: DEBUG nova.virt.hardware [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1318.761061] env[63297]: DEBUG nova.virt.hardware [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1318.761201] env[63297]: DEBUG nova.virt.hardware [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1318.761337] env[63297]: DEBUG nova.virt.hardware [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1318.761536] env[63297]: DEBUG nova.virt.hardware [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1318.761682] env[63297]: DEBUG nova.virt.hardware [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1318.761838] env[63297]: DEBUG nova.virt.hardware [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1318.762035] env[63297]: DEBUG nova.virt.hardware [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1318.762262] env[63297]: DEBUG nova.virt.hardware [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1318.763430] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9661c258-4f7e-473d-af38-eb7f0dd1f023 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.767468] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1318.767468] env[63297]: value = "task-1697200" [ 1318.767468] env[63297]: _type = "Task" [ 1318.767468] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.776024] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1030df84-72d4-44ea-9cea-5ca1b4a819dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.783619] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697200, 'name': Rename_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.799578] env[63297]: DEBUG nova.network.neutron [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1319.007231] env[63297]: DEBUG nova.network.neutron [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.141064] env[63297]: DEBUG nova.compute.manager [req-8a7482db-1f3b-425c-aef1-0c9458406715 req-26107235-dd69-4ece-8b3a-561db874794b service nova] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Received event network-vif-plugged-7c20637a-f8f5-4a26-b5af-07db4b1c9991 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1319.141331] env[63297]: DEBUG oslo_concurrency.lockutils [req-8a7482db-1f3b-425c-aef1-0c9458406715 req-26107235-dd69-4ece-8b3a-561db874794b service nova] Acquiring lock "8fa5fef6-8768-4e24-aab3-db56a10588c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.141544] env[63297]: DEBUG oslo_concurrency.lockutils [req-8a7482db-1f3b-425c-aef1-0c9458406715 req-26107235-dd69-4ece-8b3a-561db874794b service nova] Lock "8fa5fef6-8768-4e24-aab3-db56a10588c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.141710] env[63297]: DEBUG oslo_concurrency.lockutils [req-8a7482db-1f3b-425c-aef1-0c9458406715 req-26107235-dd69-4ece-8b3a-561db874794b service nova] Lock "8fa5fef6-8768-4e24-aab3-db56a10588c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.141875] env[63297]: DEBUG nova.compute.manager [req-8a7482db-1f3b-425c-aef1-0c9458406715 req-26107235-dd69-4ece-8b3a-561db874794b service nova] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] No waiting events found dispatching network-vif-plugged-7c20637a-f8f5-4a26-b5af-07db4b1c9991 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1319.142168] env[63297]: WARNING nova.compute.manager [req-8a7482db-1f3b-425c-aef1-0c9458406715 req-26107235-dd69-4ece-8b3a-561db874794b service nova] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Received unexpected event network-vif-plugged-7c20637a-f8f5-4a26-b5af-07db4b1c9991 for instance with vm_state building and task_state spawning. 
[ 1319.200721] env[63297]: DEBUG nova.network.neutron [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Successfully updated port: 7c20637a-f8f5-4a26-b5af-07db4b1c9991 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1319.227476] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "92439795-6240-4103-940b-de6d87738570" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.227744] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "92439795-6240-4103-940b-de6d87738570" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.227952] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "92439795-6240-4103-940b-de6d87738570-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.228146] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "92439795-6240-4103-940b-de6d87738570-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.228314] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "92439795-6240-4103-940b-de6d87738570-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.233529] env[63297]: INFO nova.compute.manager [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Terminating instance [ 1319.236020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "eebcad60-4b8a-4fa0-b846-b65972c4c69c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.236020] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "eebcad60-4b8a-4fa0-b846-b65972c4c69c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.236020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "eebcad60-4b8a-4fa0-b846-b65972c4c69c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.236020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "eebcad60-4b8a-4fa0-b846-b65972c4c69c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.236020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "eebcad60-4b8a-4fa0-b846-b65972c4c69c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.237645] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.962s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.239178] env[63297]: DEBUG nova.compute.manager [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1319.243274] env[63297]: INFO nova.compute.manager [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Terminating instance [ 1319.244716] env[63297]: DEBUG nova.compute.manager [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1319.244909] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1319.249614] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.466s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.249800] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.251653] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.124s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.251841] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.254038] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.226s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.255748] env[63297]: INFO nova.compute.claims [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1319.259117] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00540796-fd31-4fba-8735-1b11a605ff58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.262285] env[63297]: DEBUG nova.compute.manager [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1319.262465] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1319.263789] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02769f75-b6cd-4d38-a951-5caf9af61dd9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.279384] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697199, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.012539} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.279664] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1319.280046] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1319.281133] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 8bc4bb67-bc00-44c6-9c83-c0a1072142e6/8bc4bb67-bc00-44c6-9c83-c0a1072142e6.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1319.281350] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1319.281577] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-392008a3-4cc6-4695-8a9d-29e22816b8c8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.283195] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1853f957-c87e-4e3b-a70f-009fc6c930e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.286620] env[63297]: INFO nova.scheduler.client.report [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] 
Deleted allocations for instance 22a927ad-c2af-4814-b728-ec31b76a34d4 [ 1319.290208] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e65cc179-a78c-40a1-ae14-c7eee7f5da22 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.293653] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697200, 'name': Rename_Task, 'duration_secs': 0.17221} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.296199] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1319.297306] env[63297]: INFO nova.scheduler.client.report [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Deleted allocations for instance 81920a24-f406-4923-98b7-cc0f3d0ccc8b [ 1319.300535] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2de93e79-513d-442b-b0a4-cde3516627bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.305803] env[63297]: DEBUG oslo_vmware.api [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1319.305803] env[63297]: value = "task-1697201" [ 1319.305803] env[63297]: _type = "Task" [ 1319.305803] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.306174] env[63297]: DEBUG oslo_vmware.api [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1319.306174] env[63297]: value = "task-1697202" [ 1319.306174] env[63297]: _type = "Task" [ 1319.306174] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.308077] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Waiting for the task: (returnval){ [ 1319.308077] env[63297]: value = "task-1697203" [ 1319.308077] env[63297]: _type = "Task" [ 1319.308077] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.321292] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1319.321292] env[63297]: value = "task-1697204" [ 1319.321292] env[63297]: _type = "Task" [ 1319.321292] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.335385] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697203, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.336380] env[63297]: DEBUG oslo_vmware.api [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697202, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.336380] env[63297]: DEBUG oslo_vmware.api [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697201, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.342040] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697204, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.508908] env[63297]: DEBUG oslo_concurrency.lockutils [req-3f8878b3-dd3a-4ee2-8f22-87f6410e0980 req-f02daac5-6253-43e1-9cac-3552b3e1460c service nova] Releasing lock "refresh_cache-2d7b237e-f86d-42b1-ab04-320f0012a2d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1319.687389] env[63297]: DEBUG nova.network.neutron [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Successfully updated port: 6a0b5502-f624-47b8-b693-32a590e69f57 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1319.704190] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "refresh_cache-8fa5fef6-8768-4e24-aab3-db56a10588c2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1319.704372] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "refresh_cache-8fa5fef6-8768-4e24-aab3-db56a10588c2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.704543] env[63297]: DEBUG nova.network.neutron [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1319.744970] env[63297]: DEBUG nova.compute.utils [None req-dccec896-e709-46eb-a72a-f5a1b017738b 
tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1319.746418] env[63297]: DEBUG nova.compute.manager [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1319.746531] env[63297]: DEBUG nova.network.neutron [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1319.798758] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5bc90ed2-2972-4afd-8256-8f76108e5750 tempest-InstanceActionsNegativeTestJSON-176675487 tempest-InstanceActionsNegativeTestJSON-176675487-project-member] Lock "22a927ad-c2af-4814-b728-ec31b76a34d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.579s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.801113] env[63297]: DEBUG nova.policy [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30253d72cb1f4a9faa9b616ad418d9e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1348601359224186bf59b12bfa5f1ef0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1319.808550] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c1a3837-54c8-4e44-938d-436a0bdc5def tempest-ServersAdminNegativeTestJSON-1639887896 tempest-ServersAdminNegativeTestJSON-1639887896-project-member] Lock "81920a24-f406-4923-98b7-cc0f3d0ccc8b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.032s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.829126] env[63297]: DEBUG oslo_vmware.api [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697201, 'name': PowerOffVM_Task, 'duration_secs': 0.19145} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.837039] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1319.837128] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1319.837661] env[63297]: DEBUG oslo_vmware.api [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697202, 'name': PowerOffVM_Task, 'duration_secs': 0.232463} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.837889] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04d6538c-7b0f-4c41-92e7-c5cd921a945e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.839732] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1319.839951] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1319.843877] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3e1025b-17ed-420b-8c57-9a96f7b3426e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.846410] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697203, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081238} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.850917] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1319.851572] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697204, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.853376] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f37c3e7-6c09-4b82-a17c-91e61065f3f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.879424] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 8bc4bb67-bc00-44c6-9c83-c0a1072142e6/8bc4bb67-bc00-44c6-9c83-c0a1072142e6.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1319.883045] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5a43afe-1d5c-45f7-90f5-54bff36ded62 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.902758] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Waiting for the task: (returnval){ [ 1319.902758] env[63297]: value = "task-1697207" [ 1319.902758] env[63297]: _type = "Task" [ 1319.902758] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.915592] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697207, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.938073] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1319.938327] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1319.938996] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Deleting the datastore file [datastore1] 92439795-6240-4103-940b-de6d87738570 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1319.938996] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee97a2e4-5f70-423d-998a-273f0f886999 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.945713] env[63297]: DEBUG oslo_vmware.api [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1319.945713] env[63297]: value = "task-1697208" [ 1319.945713] env[63297]: _type = "Task" [ 1319.945713] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.955351] env[63297]: DEBUG oslo_vmware.api [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697208, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.967902] env[63297]: DEBUG nova.compute.manager [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Received event network-vif-plugged-6a0b5502-f624-47b8-b693-32a590e69f57 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1319.968098] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] Acquiring lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.968337] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] Lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.968571] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] Lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.968749] env[63297]: DEBUG nova.compute.manager [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] No waiting events found dispatching network-vif-plugged-6a0b5502-f624-47b8-b693-32a590e69f57 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1319.969138] env[63297]: WARNING nova.compute.manager [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Received unexpected event network-vif-plugged-6a0b5502-f624-47b8-b693-32a590e69f57 for instance with vm_state building and task_state spawning. [ 1319.969359] env[63297]: DEBUG nova.compute.manager [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Received event network-changed-6a0b5502-f624-47b8-b693-32a590e69f57 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1319.969538] env[63297]: DEBUG nova.compute.manager [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Refreshing instance network info cache due to event network-changed-6a0b5502-f624-47b8-b693-32a590e69f57. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1319.969722] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] Acquiring lock "refresh_cache-2d7b237e-f86d-42b1-ab04-320f0012a2d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1319.970143] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] Acquired lock "refresh_cache-2d7b237e-f86d-42b1-ab04-320f0012a2d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.970143] env[63297]: DEBUG nova.network.neutron [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Refreshing network info cache for port 6a0b5502-f624-47b8-b693-32a590e69f57 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1319.971348] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1319.975099] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1319.975099] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Deleting the datastore file [datastore1] eebcad60-4b8a-4fa0-b846-b65972c4c69c {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1319.975099] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bded01d2-cfad-48f9-b49d-ec740e0a825b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.980551] env[63297]: DEBUG oslo_vmware.api [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for the task: (returnval){ [ 1319.980551] env[63297]: value = "task-1697209" [ 1319.980551] env[63297]: _type = "Task" [ 1319.980551] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.991596] env[63297]: DEBUG oslo_vmware.api [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697209, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.189937] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "refresh_cache-2d7b237e-f86d-42b1-ab04-320f0012a2d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1320.251519] env[63297]: DEBUG nova.compute.manager [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1320.253594] env[63297]: DEBUG nova.network.neutron [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1320.258467] env[63297]: DEBUG nova.network.neutron [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Successfully created port: fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1320.345249] env[63297]: DEBUG oslo_vmware.api [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697204, 'name': PowerOnVM_Task, 'duration_secs': 0.728384} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.345509] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1320.345843] env[63297]: INFO nova.compute.manager [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Took 9.89 seconds to spawn the instance on the hypervisor. 
[ 1320.346143] env[63297]: DEBUG nova.compute.manager [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1320.347849] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8cd2e9f-b165-4b68-b850-8ff04a41ab92 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.416654] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697207, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.466727] env[63297]: DEBUG oslo_vmware.api [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697208, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173071} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.466727] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1320.466889] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1320.467258] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1320.467487] env[63297]: INFO nova.compute.manager [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: 92439795-6240-4103-940b-de6d87738570] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1320.467748] env[63297]: DEBUG oslo.service.loopingcall [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1320.470598] env[63297]: DEBUG nova.compute.manager [-] [instance: 92439795-6240-4103-940b-de6d87738570] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1320.470701] env[63297]: DEBUG nova.network.neutron [-] [instance: 92439795-6240-4103-940b-de6d87738570] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1320.495140] env[63297]: DEBUG oslo_vmware.api [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Task: {'id': task-1697209, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170031} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.495519] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1320.495519] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1320.495672] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1320.495820] env[63297]: INFO nova.compute.manager [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1320.496068] env[63297]: DEBUG oslo.service.loopingcall [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1320.496261] env[63297]: DEBUG nova.compute.manager [-] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1320.496353] env[63297]: DEBUG nova.network.neutron [-] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1320.576867] env[63297]: DEBUG nova.network.neutron [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1320.617806] env[63297]: DEBUG nova.network.neutron [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Updating instance_info_cache with network_info: [{"id": "7c20637a-f8f5-4a26-b5af-07db4b1c9991", "address": "fa:16:3e:89:91:c3", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c20637a-f8", "ovs_interfaceid": "7c20637a-f8f5-4a26-b5af-07db4b1c9991", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.815184] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1387dbc-ab4c-4ce9-affa-31ef8da78412 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.825739] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ae0c72-5ef6-492b-a2a4-e42a92b4ac9e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.859132] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4e0620-7bf2-40d8-9b47-e677c1d763cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.869235] env[63297]: DEBUG nova.network.neutron [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.878068] env[63297]: INFO nova.compute.manager [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Took 48.10 seconds to build instance. 
[ 1320.882032] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e082d76f-d60a-4878-bee1-622b894622b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.895643] env[63297]: DEBUG nova.compute.provider_tree [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1320.914009] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697207, 'name': ReconfigVM_Task, 'duration_secs': 0.724071} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.914562] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 8bc4bb67-bc00-44c6-9c83-c0a1072142e6/8bc4bb67-bc00-44c6-9c83-c0a1072142e6.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1320.915201] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa60c3e3-cf33-4059-b5e2-7d2534cc805d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.923106] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Waiting for the task: (returnval){ [ 1320.923106] env[63297]: value = "task-1697210" [ 1320.923106] env[63297]: _type = "Task" [ 1320.923106] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.931809] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697210, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.122677] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "refresh_cache-8fa5fef6-8768-4e24-aab3-db56a10588c2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1321.124030] env[63297]: DEBUG nova.compute.manager [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Instance network_info: |[{"id": "7c20637a-f8f5-4a26-b5af-07db4b1c9991", "address": "fa:16:3e:89:91:c3", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c20637a-f8", "ovs_interfaceid": "7c20637a-f8f5-4a26-b5af-07db4b1c9991", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1321.124030] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:91:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee4b2432-c393-4e50-ae0e-b5e12bad37db', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c20637a-f8f5-4a26-b5af-07db4b1c9991', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1321.135546] env[63297]: DEBUG oslo.service.loopingcall [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1321.139168] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1321.142152] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5bdbc957-7132-4faf-b527-3ab7e1b4d901 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.160844] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1321.160844] env[63297]: value = "task-1697211" [ 1321.160844] env[63297]: _type = "Task" [ 1321.160844] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.169223] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697211, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.267029] env[63297]: DEBUG nova.compute.manager [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1321.302519] env[63297]: DEBUG nova.virt.hardware [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1321.302519] env[63297]: DEBUG nova.virt.hardware [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1321.302519] env[63297]: DEBUG nova.virt.hardware [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1321.302519] env[63297]: DEBUG nova.virt.hardware [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1321.302519] env[63297]: DEBUG nova.virt.hardware [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1321.302519] env[63297]: DEBUG nova.virt.hardware [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1321.302519] env[63297]: DEBUG nova.virt.hardware [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1321.302519] env[63297]: DEBUG nova.virt.hardware [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1321.302519] env[63297]: DEBUG nova.virt.hardware [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1321.302519] env[63297]: DEBUG nova.virt.hardware [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1321.302519] env[63297]: DEBUG nova.virt.hardware [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1321.302519] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb1977a-feda-4330-bab9-9fc3696157f6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.313164] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8b0b99-6b9f-4bc6-bb7c-ee2fe341182b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.371984] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6121a24-804f-4091-ac0e-5feae34d844a req-c074fca7-1e42-4758-9fd2-28aa888ba169 service nova] Releasing lock "refresh_cache-2d7b237e-f86d-42b1-ab04-320f0012a2d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1321.372468] env[63297]: DEBUG oslo_concurrency.lockutils 
[None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquired lock "refresh_cache-2d7b237e-f86d-42b1-ab04-320f0012a2d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.372635] env[63297]: DEBUG nova.network.neutron [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1321.386103] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1750f3f7-0cf9-44c0-ab9e-127f056dda50 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.992s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.398500] env[63297]: DEBUG nova.scheduler.client.report [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1321.433785] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697210, 'name': Rename_Task, 'duration_secs': 0.31165} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.434404] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1321.434404] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41a90cff-7155-40ef-a5a7-86f6b3f4416e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.440911] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Waiting for the task: (returnval){ [ 1321.440911] env[63297]: value = "task-1697212" [ 1321.440911] env[63297]: _type = "Task" [ 1321.440911] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.449185] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697212, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.640408] env[63297]: DEBUG nova.network.neutron [-] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.678024] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697211, 'name': CreateVM_Task, 'duration_secs': 0.461375} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.678024] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1321.678024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1321.678024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.678024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1321.678024] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0818fb04-5e2e-4820-ab1c-6357e148758d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.682603] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1321.682603] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5227f93b-163f-2e45-492e-d143ff0a0875" [ 1321.682603] env[63297]: _type = "Task" [ 1321.682603] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.692699] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5227f93b-163f-2e45-492e-d143ff0a0875, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.806068] env[63297]: DEBUG nova.compute.manager [req-70a6d361-f227-4675-82b6-6bce9f99e0bd req-160efaf4-9d33-4233-8cd7-13ee93f79079 service nova] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Received event network-changed-7c20637a-f8f5-4a26-b5af-07db4b1c9991 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1321.806279] env[63297]: DEBUG nova.compute.manager [req-70a6d361-f227-4675-82b6-6bce9f99e0bd req-160efaf4-9d33-4233-8cd7-13ee93f79079 service nova] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Refreshing instance network info cache due to event network-changed-7c20637a-f8f5-4a26-b5af-07db4b1c9991. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1321.806504] env[63297]: DEBUG oslo_concurrency.lockutils [req-70a6d361-f227-4675-82b6-6bce9f99e0bd req-160efaf4-9d33-4233-8cd7-13ee93f79079 service nova] Acquiring lock "refresh_cache-8fa5fef6-8768-4e24-aab3-db56a10588c2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1321.807179] env[63297]: DEBUG oslo_concurrency.lockutils [req-70a6d361-f227-4675-82b6-6bce9f99e0bd req-160efaf4-9d33-4233-8cd7-13ee93f79079 service nova] Acquired lock "refresh_cache-8fa5fef6-8768-4e24-aab3-db56a10588c2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.807391] env[63297]: DEBUG nova.network.neutron [req-70a6d361-f227-4675-82b6-6bce9f99e0bd req-160efaf4-9d33-4233-8cd7-13ee93f79079 service nova] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Refreshing network info cache for port 7c20637a-f8f5-4a26-b5af-07db4b1c9991 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1321.890722] env[63297]: DEBUG nova.compute.manager [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1321.909261] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.909261] env[63297]: DEBUG nova.compute.manager [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1321.914040] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.145s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.915616] env[63297]: INFO nova.compute.claims [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1321.918182] env[63297]: DEBUG nova.network.neutron [-] [instance: 92439795-6240-4103-940b-de6d87738570] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.951690] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697212, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.968960] env[63297]: DEBUG nova.network.neutron [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1322.078042] env[63297]: DEBUG nova.compute.manager [req-542f89e3-8be9-4772-9e9a-74782563febf req-b5e30b76-4fbc-4485-949b-75672dc95b8f service nova] [instance: 92439795-6240-4103-940b-de6d87738570] Received event network-vif-deleted-cf08728a-cae2-4f43-af9e-94a167d2750a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1322.145951] env[63297]: INFO nova.compute.manager [-] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Took 1.65 seconds to deallocate network for instance. [ 1322.194127] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5227f93b-163f-2e45-492e-d143ff0a0875, 'name': SearchDatastore_Task, 'duration_secs': 0.012505} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.195081] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1322.195313] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1322.195541] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.195684] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.195857] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1322.196378] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef91677a-1092-414c-b005-a8bffe7c4d7d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.205373] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1322.205577] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1322.206354] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf244419-cc38-45ec-9bef-9b9dabe05e63 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.212395] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1322.212395] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528da552-2da9-08fd-7f4e-c754cfd38a07" [ 1322.212395] env[63297]: _type = "Task" [ 1322.212395] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.219887] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528da552-2da9-08fd-7f4e-c754cfd38a07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.415019] env[63297]: DEBUG nova.compute.utils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1322.419363] env[63297]: DEBUG nova.compute.manager [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1322.419363] env[63297]: DEBUG nova.network.neutron [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1322.421770] env[63297]: INFO nova.compute.manager [-] [instance: 92439795-6240-4103-940b-de6d87738570] Took 1.95 seconds to deallocate network for instance. [ 1322.424838] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.454397] env[63297]: DEBUG oslo_vmware.api [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697212, 'name': PowerOnVM_Task, 'duration_secs': 0.552121} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.454668] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1322.454876] env[63297]: INFO nova.compute.manager [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Took 9.33 seconds to spawn the instance on the hypervisor. [ 1322.455092] env[63297]: DEBUG nova.compute.manager [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1322.455946] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a92113-c4fd-4740-879f-5676528a37c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.570548] env[63297]: DEBUG nova.policy [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '740582c6cd1d4a99ab17d64d3dfffb76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe4661ce8d764c42a57538f780b686ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1322.654105] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.710246] env[63297]: DEBUG nova.network.neutron [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Updating instance_info_cache with network_info: [{"id": "614d9b29-4dd8-4ac5-bbb4-bb43593e3386", "address": "fa:16:3e:66:ec:f1", "network": {"id": "4cc575c5-bae3-4aac-b61a-8bb5a678c03e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-117073918", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap614d9b29-4d", "ovs_interfaceid": "614d9b29-4dd8-4ac5-bbb4-bb43593e3386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6a0b5502-f624-47b8-b693-32a590e69f57", "address": "fa:16:3e:69:35:1b", "network": {"id": "d634e232-1f49-47e3-a59c-c72ec6d22aca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-41252518", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a0b5502-f6", "ovs_interfaceid": "6a0b5502-f624-47b8-b693-32a590e69f57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.732838] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528da552-2da9-08fd-7f4e-c754cfd38a07, 'name': SearchDatastore_Task, 'duration_secs': 0.00861} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.732838] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf9f84aa-97b0-453a-993b-3ce32b7b6bc1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.738250] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1322.738250] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ce726a-6b7c-ba8c-719a-f9cdbbd53b52" [ 1322.738250] env[63297]: _type = "Task" [ 1322.738250] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.750697] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ce726a-6b7c-ba8c-719a-f9cdbbd53b52, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.920607] env[63297]: DEBUG nova.compute.manager [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1322.935605] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.950356] env[63297]: DEBUG nova.network.neutron [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Successfully updated port: fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1322.977726] env[63297]: INFO nova.compute.manager [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Took 47.58 seconds to build instance. [ 1323.018035] env[63297]: DEBUG nova.network.neutron [req-70a6d361-f227-4675-82b6-6bce9f99e0bd req-160efaf4-9d33-4233-8cd7-13ee93f79079 service nova] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Updated VIF entry in instance network info cache for port 7c20637a-f8f5-4a26-b5af-07db4b1c9991. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1323.018411] env[63297]: DEBUG nova.network.neutron [req-70a6d361-f227-4675-82b6-6bce9f99e0bd req-160efaf4-9d33-4233-8cd7-13ee93f79079 service nova] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Updating instance_info_cache with network_info: [{"id": "7c20637a-f8f5-4a26-b5af-07db4b1c9991", "address": "fa:16:3e:89:91:c3", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c20637a-f8", "ovs_interfaceid": "7c20637a-f8f5-4a26-b5af-07db4b1c9991", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.216548] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Releasing lock "refresh_cache-2d7b237e-f86d-42b1-ab04-320f0012a2d1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.217196] env[63297]: DEBUG nova.compute.manager [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Instance network_info: |[{"id": "614d9b29-4dd8-4ac5-bbb4-bb43593e3386", "address": "fa:16:3e:66:ec:f1", "network": {"id": "4cc575c5-bae3-4aac-b61a-8bb5a678c03e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-117073918", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap614d9b29-4d", "ovs_interfaceid": "614d9b29-4dd8-4ac5-bbb4-bb43593e3386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6a0b5502-f624-47b8-b693-32a590e69f57", "address": "fa:16:3e:69:35:1b", "network": {"id": 
"d634e232-1f49-47e3-a59c-c72ec6d22aca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-41252518", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a0b5502-f6", "ovs_interfaceid": "6a0b5502-f624-47b8-b693-32a590e69f57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1323.218021] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:ec:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ace50835-5731-4c77-b6c0-3076d7b4aa21', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '614d9b29-4dd8-4ac5-bbb4-bb43593e3386', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:35:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6a0b5502-f624-47b8-b693-32a590e69f57', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1323.228806] env[63297]: DEBUG oslo.service.loopingcall [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1323.232184] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1323.232626] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40a026a5-8c4e-4315-94dc-7ce68918f8ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.264596] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ce726a-6b7c-ba8c-719a-f9cdbbd53b52, 'name': SearchDatastore_Task, 'duration_secs': 0.011068} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.268314] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.268509] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 8fa5fef6-8768-4e24-aab3-db56a10588c2/8fa5fef6-8768-4e24-aab3-db56a10588c2.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1323.268662] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1323.268662] env[63297]: value = "task-1697213" [ 1323.268662] env[63297]: _type = "Task" [ 1323.268662] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.269255] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ef6817b-ea0b-4a13-8e10-90af618c9c31 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.280028] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697213, 'name': CreateVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.285437] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1323.285437] env[63297]: value = "task-1697214" [ 1323.285437] env[63297]: _type = "Task" [ 1323.285437] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.298428] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697214, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.458502] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "refresh_cache-d15a7e98-755b-4c5c-ba34-dc5fc3f8846d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.458597] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquired lock "refresh_cache-d15a7e98-755b-4c5c-ba34-dc5fc3f8846d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.458757] env[63297]: DEBUG nova.network.neutron [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1323.460117] env[63297]: DEBUG oslo_concurrency.lockutils [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Acquiring lock "8bc4bb67-bc00-44c6-9c83-c0a1072142e6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.485018] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8d9b7d-8768-406c-9a0f-f33496a33b96 tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Lock "8bc4bb67-bc00-44c6-9c83-c0a1072142e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.427s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1323.487906] env[63297]: DEBUG oslo_concurrency.lockutils [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Lock "8bc4bb67-bc00-44c6-9c83-c0a1072142e6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.028s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.488167] env[63297]: DEBUG oslo_concurrency.lockutils [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Acquiring lock "8bc4bb67-bc00-44c6-9c83-c0a1072142e6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.488415] env[63297]: DEBUG oslo_concurrency.lockutils [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Lock "8bc4bb67-bc00-44c6-9c83-c0a1072142e6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.488631] env[63297]: DEBUG oslo_concurrency.lockutils [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Lock "8bc4bb67-bc00-44c6-9c83-c0a1072142e6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1323.500169] env[63297]: INFO nova.compute.manager [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Terminating instance [ 1323.502226] env[63297]: DEBUG nova.compute.manager [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1323.502461] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1323.503895] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b869b41-250e-48d0-a846-45bfd91ed723 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.510615] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94de3752-fa12-46a4-929e-e97268037fa9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.521343] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1323.521579] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b02634d-58ea-47b1-9f23-1209a8231978 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.524176] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6d20f5-27fc-469b-bea2-438e34c0e1f0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.527826] env[63297]: DEBUG oslo_concurrency.lockutils [req-70a6d361-f227-4675-82b6-6bce9f99e0bd req-160efaf4-9d33-4233-8cd7-13ee93f79079 service nova] Releasing lock "refresh_cache-8fa5fef6-8768-4e24-aab3-db56a10588c2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.528223] env[63297]: DEBUG nova.compute.manager [req-70a6d361-f227-4675-82b6-6bce9f99e0bd req-160efaf4-9d33-4233-8cd7-13ee93f79079 
service nova] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Received event network-vif-deleted-d865dcd8-f11a-4c74-8534-760e6a578524 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1323.573519] env[63297]: DEBUG nova.network.neutron [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Successfully created port: 80a2f749-1281-4f8d-853e-5d5bca529aa3 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1323.578152] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87327979-f980-41e6-a5ec-b75e75096485 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.579806] env[63297]: DEBUG oslo_vmware.api [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Waiting for the task: (returnval){ [ 1323.579806] env[63297]: value = "task-1697215" [ 1323.579806] env[63297]: _type = "Task" [ 1323.579806] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.590671] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2320c8-81da-460e-914d-974f5f42f056 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.599763] env[63297]: DEBUG oslo_vmware.api [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697215, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.611742] env[63297]: DEBUG nova.compute.provider_tree [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1323.783795] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697213, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.796464] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697214, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.931704] env[63297]: DEBUG nova.compute.manager [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1323.966299] env[63297]: DEBUG nova.virt.hardware [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1323.966543] env[63297]: DEBUG nova.virt.hardware [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1323.966697] env[63297]: DEBUG nova.virt.hardware [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1323.967044] env[63297]: DEBUG nova.virt.hardware [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1323.967611] env[63297]: DEBUG nova.virt.hardware [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1323.967611] env[63297]: DEBUG nova.virt.hardware [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1323.967611] env[63297]: DEBUG nova.virt.hardware [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1323.967830] env[63297]: DEBUG nova.virt.hardware [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1323.967894] 
env[63297]: DEBUG nova.virt.hardware [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1323.968056] env[63297]: DEBUG nova.virt.hardware [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1323.968260] env[63297]: DEBUG nova.virt.hardware [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1323.969218] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7a349f-7e4f-4ee6-a8fa-50b8fa40af1a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.978244] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967de226-3d82-4a2b-b53a-4b9355267677 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.995583] env[63297]: DEBUG nova.compute.manager [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1324.041699] env[63297]: DEBUG nova.network.neutron [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1324.093399] env[63297]: DEBUG oslo_vmware.api [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697215, 'name': PowerOffVM_Task, 'duration_secs': 0.352991} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.093699] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1324.094278] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1324.094648] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e01eeb1-648d-4749-a05a-c02af6adf15b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.115544] env[63297]: DEBUG nova.scheduler.client.report [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1324.223147] env[63297]: DEBUG nova.compute.manager [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Received event network-changed-468613e2-02e8-4bf5-9887-fc0f90ff2f75 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1324.223361] env[63297]: DEBUG nova.compute.manager [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Refreshing instance network info cache due to event network-changed-468613e2-02e8-4bf5-9887-fc0f90ff2f75. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1324.223584] env[63297]: DEBUG oslo_concurrency.lockutils [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] Acquiring lock "refresh_cache-1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1324.223728] env[63297]: DEBUG oslo_concurrency.lockutils [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] Acquired lock "refresh_cache-1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.223888] env[63297]: DEBUG nova.network.neutron [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Refreshing network info cache for port 468613e2-02e8-4bf5-9887-fc0f90ff2f75 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1324.284386] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697213, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.296280] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697214, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541652} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.296904] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 8fa5fef6-8768-4e24-aab3-db56a10588c2/8fa5fef6-8768-4e24-aab3-db56a10588c2.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1324.297348] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1324.297756] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b92ed492-3473-4ec9-bf11-81957b06743b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.307171] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1324.307171] env[63297]: value = "task-1697217" [ 1324.307171] env[63297]: _type = "Task" [ 1324.307171] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.317277] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697217, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.479373] env[63297]: DEBUG nova.network.neutron [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Updating instance_info_cache with network_info: [{"id": "fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc", "address": "fa:16:3e:0e:66:c2", "network": {"id": "37468c24-d4a3-498d-9a46-9eb69e62b4b7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1496183778-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1348601359224186bf59b12bfa5f1ef0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb21185a-d6", "ovs_interfaceid": "fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.530133] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.623137] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.709s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.623680] env[63297]: DEBUG nova.compute.manager [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1324.628223] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.907s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.628487] env[63297]: DEBUG nova.objects.instance [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Lazy-loading 'resources' on Instance uuid 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1324.787856] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697213, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.816744] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697217, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.248934} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.816744] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1324.817898] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647f94d1-191f-473e-9a24-19e7b95748e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.844722] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 8fa5fef6-8768-4e24-aab3-db56a10588c2/8fa5fef6-8768-4e24-aab3-db56a10588c2.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1324.844998] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-853e7be6-6737-4596-98c1-2915019289b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.866191] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1324.866191] env[63297]: value = "task-1697218" [ 1324.866191] env[63297]: _type = "Task" [ 1324.866191] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.874920] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697218, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.984940] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Releasing lock "refresh_cache-d15a7e98-755b-4c5c-ba34-dc5fc3f8846d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1324.985408] env[63297]: DEBUG nova.compute.manager [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Instance network_info: |[{"id": "fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc", "address": "fa:16:3e:0e:66:c2", "network": {"id": "37468c24-d4a3-498d-9a46-9eb69e62b4b7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1496183778-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1348601359224186bf59b12bfa5f1ef0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb21185a-d6", "ovs_interfaceid": "fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1324.985866] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:66:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52358fcc-0d9f-45dd-8c75-db533fd992c3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1324.993791] env[63297]: DEBUG oslo.service.loopingcall [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1324.994036] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1324.994323] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-975569ed-40d4-4c02-9aa6-2057b7b0701d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.022448] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1325.022448] env[63297]: value = "task-1697219" [ 1325.022448] env[63297]: _type = "Task" [ 1325.022448] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.035679] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697219, 'name': CreateVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.131888] env[63297]: DEBUG nova.compute.utils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1325.139889] env[63297]: DEBUG nova.compute.manager [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1325.140143] env[63297]: DEBUG nova.network.neutron [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1325.248755] env[63297]: DEBUG nova.policy [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '740582c6cd1d4a99ab17d64d3dfffb76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe4661ce8d764c42a57538f780b686ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1325.286976] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697213, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.387127] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697218, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.446269] env[63297]: DEBUG nova.network.neutron [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Updated VIF entry in instance network info cache for port 468613e2-02e8-4bf5-9887-fc0f90ff2f75. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1325.446269] env[63297]: DEBUG nova.network.neutron [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Updating instance_info_cache with network_info: [{"id": "468613e2-02e8-4bf5-9887-fc0f90ff2f75", "address": "fa:16:3e:bb:c6:4d", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap468613e2-02", "ovs_interfaceid": "468613e2-02e8-4bf5-9887-fc0f90ff2f75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.494086] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1325.494309] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1325.494513] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Deleting the datastore file [datastore1] 8bc4bb67-bc00-44c6-9c83-c0a1072142e6 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1325.495464] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbb9cf1e-34a3-408e-850b-cf5d7c27f0d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.508767] env[63297]: DEBUG oslo_vmware.api 
[None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Waiting for the task: (returnval){ [ 1325.508767] env[63297]: value = "task-1697220" [ 1325.508767] env[63297]: _type = "Task" [ 1325.508767] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.516427] env[63297]: DEBUG oslo_vmware.api [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697220, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.533650] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697219, 'name': CreateVM_Task, 'duration_secs': 0.41965} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.534058] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1325.535337] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1325.535337] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.535337] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1325.535522] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60cce63f-7091-4835-b91c-f481da627138 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.544077] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1325.544077] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a5b79f-391b-0b67-23ee-77a89f03f6e6" [ 1325.544077] env[63297]: _type = "Task" [ 1325.544077] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.553217] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a5b79f-391b-0b67-23ee-77a89f03f6e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.645215] env[63297]: DEBUG nova.compute.manager [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1325.734123] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5ea353-7a92-444d-b2be-93b38a14b7c7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.742902] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092e998b-6bf1-452d-b2bc-df04bbf71473 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.776666] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742dcda4-e714-4390-bdf3-f74840a4b271 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.794933] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697213, 'name': CreateVM_Task, 'duration_secs': 2.316649} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.795205] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1325.796918] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4401064d-4f51-4cc3-b216-140d4f921752 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.802864] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1325.813664] env[63297]: DEBUG nova.compute.provider_tree [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1325.880579] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697218, 'name': ReconfigVM_Task, 'duration_secs': 0.77653} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.881063] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 8fa5fef6-8768-4e24-aab3-db56a10588c2/8fa5fef6-8768-4e24-aab3-db56a10588c2.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1325.881851] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd99d806-2234-47be-a0f8-d038e3a30040 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.888850] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1325.888850] env[63297]: value = "task-1697221" [ 1325.888850] env[63297]: _type = "Task" [ 1325.888850] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.899674] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697221, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.953800] env[63297]: DEBUG oslo_concurrency.lockutils [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] Releasing lock "refresh_cache-1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.954179] env[63297]: DEBUG nova.compute.manager [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Received event network-vif-plugged-fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1325.954497] env[63297]: DEBUG oslo_concurrency.lockutils [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] Acquiring lock "d15a7e98-755b-4c5c-ba34-dc5fc3f8846d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.954800] env[63297]: DEBUG oslo_concurrency.lockutils [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] Lock "d15a7e98-755b-4c5c-ba34-dc5fc3f8846d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.955026] env[63297]: DEBUG oslo_concurrency.lockutils [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] Lock "d15a7e98-755b-4c5c-ba34-dc5fc3f8846d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.955255] env[63297]: DEBUG nova.compute.manager [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] No waiting events found dispatching network-vif-plugged-fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1325.955466] env[63297]: WARNING nova.compute.manager [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Received unexpected event network-vif-plugged-fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc for instance with vm_state building and task_state spawning. [ 1325.955674] env[63297]: DEBUG nova.compute.manager [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Received event network-changed-fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1325.955843] env[63297]: DEBUG nova.compute.manager [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Refreshing instance network info cache due to event network-changed-fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1325.956050] env[63297]: DEBUG oslo_concurrency.lockutils [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] Acquiring lock "refresh_cache-d15a7e98-755b-4c5c-ba34-dc5fc3f8846d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1325.956194] env[63297]: DEBUG oslo_concurrency.lockutils [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] Acquired lock "refresh_cache-d15a7e98-755b-4c5c-ba34-dc5fc3f8846d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.956363] env[63297]: DEBUG nova.network.neutron [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Refreshing network info cache for port fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1325.972088] env[63297]: DEBUG nova.network.neutron [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Successfully created port: 2dd60b07-2f52-40a1-96a9-05d6dd307592 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1326.016848] env[63297]: DEBUG oslo_vmware.api [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Task: {'id': task-1697220, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167602} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.017181] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1326.017382] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1326.017598] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1326.017813] env[63297]: INFO nova.compute.manager [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Took 2.52 seconds to destroy the instance on the hypervisor. 
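The pop_instance_event records above show the compute manager taking a short-lived per-instance "-events" lock, checking whether anything is waiting for the incoming network-vif-plugged event, and logging the "Received unexpected event" WARNING when no waiter is registered. A minimal stdlib-only sketch of that waiter/pop pattern (hypothetical class and names; not Nova's actual InstanceEvents implementation):

    import threading
    from collections import defaultdict

    class InstanceEventRegistry:
        """Toy per-instance event registry: spawning code registers a waiter,
        incoming external events either wake it or are reported as unexpected,
        mirroring the WARNING lines in the log above."""

        def __init__(self):
            self._lock = threading.Lock()          # guards the waiter table
            self._waiters = defaultdict(dict)      # instance_uuid -> {event_name: Event}

        def prepare_for(self, instance_uuid, event_name):
            """Called by the spawning thread before it starts waiting."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = waiter
            return waiter

        def pop_event(self, instance_uuid, event_name):
            """Called when an external event such as network-vif-plugged-<port> arrives."""
            with self._lock:                        # the brief "-events" lock seen above
                waiter = self._waiters[instance_uuid].pop(event_name, None)
            if waiter is None:
                print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
            else:
                waiter.set()                        # wake the thread waiting in spawn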
[ 1326.018108] env[63297]: DEBUG oslo.service.loopingcall [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1326.018340] env[63297]: DEBUG nova.compute.manager [-] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1326.018472] env[63297]: DEBUG nova.network.neutron [-] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1326.054874] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a5b79f-391b-0b67-23ee-77a89f03f6e6, 'name': SearchDatastore_Task, 'duration_secs': 0.012619} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.054874] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1326.054874] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1326.054874] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.055179] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.055179] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1326.055558] env[63297]: DEBUG 
oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.056451] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1326.056451] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79f57e28-9492-4141-af01-823c35223a7c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.057972] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58c7b6ef-ed06-4a9a-ba46-23de32b85896 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.063903] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1326.063903] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]521d0238-8590-7783-8ad1-581acb8e96ee" [ 1326.063903] env[63297]: _type = "Task" [ 1326.063903] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.069309] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1326.069309] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1326.070191] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c5adaf2-4559-4c28-b533-44f68b58da16 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.075794] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521d0238-8590-7783-8ad1-581acb8e96ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.080352] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1326.080352] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5242ca4b-cbcb-a1b8-8bb8-7963656c4cc5" [ 1326.080352] env[63297]: _type = "Task" [ 1326.080352] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.090355] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5242ca4b-cbcb-a1b8-8bb8-7963656c4cc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.093100] env[63297]: DEBUG nova.compute.manager [req-f750410a-5e32-47fe-adec-89f60fb8069b req-3fc431e0-2e71-4bed-a81b-bba257f73392 service nova] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Received event network-vif-plugged-80a2f749-1281-4f8d-853e-5d5bca529aa3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1326.093100] env[63297]: DEBUG oslo_concurrency.lockutils [req-f750410a-5e32-47fe-adec-89f60fb8069b req-3fc431e0-2e71-4bed-a81b-bba257f73392 service nova] Acquiring lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.093100] env[63297]: DEBUG oslo_concurrency.lockutils [req-f750410a-5e32-47fe-adec-89f60fb8069b req-3fc431e0-2e71-4bed-a81b-bba257f73392 service nova] Lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.093276] env[63297]: DEBUG oslo_concurrency.lockutils [req-f750410a-5e32-47fe-adec-89f60fb8069b req-3fc431e0-2e71-4bed-a81b-bba257f73392 service nova] Lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.093419] env[63297]: DEBUG nova.compute.manager [req-f750410a-5e32-47fe-adec-89f60fb8069b req-3fc431e0-2e71-4bed-a81b-bba257f73392 service nova] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] No waiting events found dispatching network-vif-plugged-80a2f749-1281-4f8d-853e-5d5bca529aa3 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1326.093592] env[63297]: WARNING nova.compute.manager [req-f750410a-5e32-47fe-adec-89f60fb8069b req-3fc431e0-2e71-4bed-a81b-bba257f73392 service nova] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Received unexpected event network-vif-plugged-80a2f749-1281-4f8d-853e-5d5bca529aa3 for instance with vm_state building and task_state spawning. 
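The repeated "Waiting for the task: (returnval){...}", "progress is N%", and "completed successfully ... duration_secs" entries come from a poll-until-done loop around each vCenter task handle. A stripped-down sketch of that polling style (plain Python; get_task_info is a hypothetical callable standing in for the real service query, not the oslo.vmware API):

    import time

    def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
        """Poll a task until it reaches a terminal state, logging progress
        the way the _poll_task DEBUG lines above do."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)   # e.g. {'state': 'running', 'progress': 42}
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(interval)
        raise TimeoutError(f"task {task_id} did not complete within {timeout}s")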
[ 1326.131992] env[63297]: DEBUG nova.network.neutron [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Successfully updated port: 80a2f749-1281-4f8d-853e-5d5bca529aa3 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1326.317197] env[63297]: DEBUG nova.scheduler.client.report [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1326.400683] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697221, 'name': Rename_Task, 'duration_secs': 0.402958} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.400989] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1326.401258] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5468cc5-2371-49f8-b267-abaa205f3e43 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.408507] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1326.408507] env[63297]: value = "task-1697222" [ 1326.408507] env[63297]: _type = "Task" [ 1326.408507] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.420199] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697222, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.579371] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521d0238-8590-7783-8ad1-581acb8e96ee, 'name': SearchDatastore_Task, 'duration_secs': 0.011815} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.579983] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1326.580703] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1326.581098] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.596302] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5242ca4b-cbcb-a1b8-8bb8-7963656c4cc5, 'name': SearchDatastore_Task, 'duration_secs': 0.010658} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.597048] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7869ef57-8818-4399-8e3e-1a7d78bff479 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.604035] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1326.604035] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e89d86-feaa-e969-5553-66271a234652" [ 1326.604035] env[63297]: _type = "Task" [ 1326.604035] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.617026] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e89d86-feaa-e969-5553-66271a234652, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.635575] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "refresh_cache-f429dd9b-be6c-4e90-876b-3a3931fb1c4a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.635575] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired lock "refresh_cache-f429dd9b-be6c-4e90-876b-3a3931fb1c4a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.635575] env[63297]: DEBUG nova.network.neutron [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1326.661363] env[63297]: DEBUG nova.compute.manager [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1326.701282] env[63297]: DEBUG nova.virt.hardware [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1326.702480] env[63297]: DEBUG nova.virt.hardware [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1326.703162] env[63297]: DEBUG nova.virt.hardware [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1326.703162] env[63297]: DEBUG nova.virt.hardware [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Flavor pref 
0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1326.703162] env[63297]: DEBUG nova.virt.hardware [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1326.703299] env[63297]: DEBUG nova.virt.hardware [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1326.703403] env[63297]: DEBUG nova.virt.hardware [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1326.703584] env[63297]: DEBUG nova.virt.hardware [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1326.703752] env[63297]: DEBUG nova.virt.hardware [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1326.703912] env[63297]: DEBUG nova.virt.hardware [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1326.704094] env[63297]: DEBUG nova.virt.hardware [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1326.705010] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776265b0-03f9-406a-9a09-5483b2b2a083 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.719513] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1439be-4c5d-4ec1-800b-e784092ac5fb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.758255] env[63297]: DEBUG nova.network.neutron [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Updated VIF entry in instance network info cache for port fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1326.759039] env[63297]: DEBUG nova.network.neutron [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Updating instance_info_cache with network_info: [{"id": "fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc", "address": "fa:16:3e:0e:66:c2", "network": {"id": "37468c24-d4a3-498d-9a46-9eb69e62b4b7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1496183778-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1348601359224186bf59b12bfa5f1ef0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb21185a-d6", "ovs_interfaceid": "fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.826848] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.199s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.828946] env[63297]: DEBUG nova.network.neutron [-] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.830363] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.878s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.831864] env[63297]: INFO nova.compute.claims [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1326.861336] env[63297]: INFO nova.scheduler.client.report [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Deleted allocations for instance 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf [ 1326.919143] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 
tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697222, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.116370] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e89d86-feaa-e969-5553-66271a234652, 'name': SearchDatastore_Task, 'duration_secs': 0.010673} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.116744] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.117020] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d15a7e98-755b-4c5c-ba34-dc5fc3f8846d/d15a7e98-755b-4c5c-ba34-dc5fc3f8846d.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1327.117309] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.117500] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1327.117718] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a25b3623-e246-4fb7-804b-b5107473dbaa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.120081] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa49d590-767a-44cc-a0f9-0707ba347993 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.128339] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1327.128339] env[63297]: value = "task-1697223" [ 1327.128339] env[63297]: _type = "Task" [ 1327.128339] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.132935] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1327.133364] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1327.134739] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be443b0c-27e6-46f9-8c47-46c13f806dcd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.143297] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697223, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.146694] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1327.146694] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5238e9f1-a4a1-99e4-d1f9-ee0ddbca7542" [ 1327.146694] env[63297]: _type = "Task" [ 1327.146694] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.155552] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5238e9f1-a4a1-99e4-d1f9-ee0ddbca7542, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.204032] env[63297]: DEBUG nova.network.neutron [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1327.262532] env[63297]: DEBUG oslo_concurrency.lockutils [req-736ebd75-9ba8-4794-8d88-8d64c9506de8 req-3fbd647d-ff1f-4fe2-9f92-579ba3f34804 service nova] Releasing lock "refresh_cache-d15a7e98-755b-4c5c-ba34-dc5fc3f8846d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.337997] env[63297]: INFO nova.compute.manager [-] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Took 1.32 seconds to deallocate network for instance. 
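The _fetch_image_if_missing / SearchDatastore_Task / CopyVirtualDisk_Task sequence above is the usual image-cache flow: take a per-image lock, make sure the cached base disk exists in devstack-image-cache_base, copy it into the instance directory, then extend the copy to the flavor's root size. A local-filesystem analogue of that flow (plain Python with hypothetical paths and a hypothetical fetch_image callable; not the vSphere API calls themselves):

    import shutil
    import threading
    from pathlib import Path

    _image_locks = {}
    _registry_lock = threading.Lock()

    def _lock_for(image_id):
        # one lock per cached image, like the "[datastore1] devstack-image-cache_base/<id>" locks above
        with _registry_lock:
            return _image_locks.setdefault(image_id, threading.Lock())

    def clone_root_disk(cache_dir, image_id, instance_dir, root_size_bytes, fetch_image):
        """Ensure the base image is cached, copy it for the instance, then grow it."""
        base = Path(cache_dir) / image_id / f"{image_id}.vmdk"
        dest = Path(instance_dir) / f"{Path(instance_dir).name}.vmdk"
        with _lock_for(image_id):                       # serialize per-image cache population
            if not base.exists():                       # the SearchDatastore_Task existence check
                base.parent.mkdir(parents=True, exist_ok=True)
                fetch_image(image_id, base)             # download into the cache (hypothetical)
        dest.parent.mkdir(parents=True, exist_ok=True)
        shutil.copyfile(base, dest)                     # stands in for CopyVirtualDisk_Task
        if dest.stat().st_size < root_size_bytes:       # stands in for ExtendVirtualDisk_Task
            with open(dest, "r+b") as f:
                f.truncate(root_size_bytes)
        return dest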
[ 1327.380038] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2655c380-e2fe-492d-8549-100d85158a9b tempest-FloatingIPsAssociationNegativeTestJSON-1868147525 tempest-FloatingIPsAssociationNegativeTestJSON-1868147525-project-member] Lock "862302e5-ad7e-40f3-a4a3-8c4a8035e1cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.687s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.411174] env[63297]: DEBUG nova.network.neutron [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Updating instance_info_cache with network_info: [{"id": "80a2f749-1281-4f8d-853e-5d5bca529aa3", "address": "fa:16:3e:60:3e:20", "network": {"id": "a7957e8b-899a-4b5d-b0bb-59a0bd5a4ae8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1195807447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe4661ce8d764c42a57538f780b686ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80a2f749-12", "ovs_interfaceid": "80a2f749-1281-4f8d-853e-5d5bca529aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.429071] env[63297]: DEBUG oslo_vmware.api [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697222, 'name': PowerOnVM_Task, 'duration_secs': 0.882482} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.430372] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1327.430372] env[63297]: INFO nova.compute.manager [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Took 8.72 seconds to spawn the instance on the hypervisor. 
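The "Took 8.72 seconds to spawn the instance on the hypervisor" line is produced by timing an entire build phase end to end. A minimal timing helper in the same spirit (hypothetical, stdlib only; not the logging used by the compute manager):

    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_phase(label):
        """Report how long a phase took, like the 'Took N seconds to ...' lines above."""
        start = time.monotonic()
        try:
            yield
        finally:
            print(f"Took {time.monotonic() - start:.2f} seconds to {label}")

    # usage sketch:
    # with timed_phase("spawn the instance on the hypervisor"):
    #     power_on_vm()   # hypothetical call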
[ 1327.430372] env[63297]: DEBUG nova.compute.manager [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1327.430952] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312eebe3-dffe-4ee4-857e-fc6cd8233e5a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.638944] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697223, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.666649] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5238e9f1-a4a1-99e4-d1f9-ee0ddbca7542, 'name': SearchDatastore_Task, 'duration_secs': 0.00892} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.667554] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecbebe9c-93a0-49f9-8f84-e740baed2f84 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.677172] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1327.677172] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5273e443-00b4-db6d-3311-73fcc7ff6f5b" [ 1327.677172] env[63297]: _type = "Task" [ 1327.677172] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.687415] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5273e443-00b4-db6d-3311-73fcc7ff6f5b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.850054] env[63297]: DEBUG oslo_concurrency.lockutils [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.921911] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Releasing lock "refresh_cache-f429dd9b-be6c-4e90-876b-3a3931fb1c4a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.922359] env[63297]: DEBUG nova.compute.manager [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Instance network_info: |[{"id": "80a2f749-1281-4f8d-853e-5d5bca529aa3", "address": "fa:16:3e:60:3e:20", "network": {"id": "a7957e8b-899a-4b5d-b0bb-59a0bd5a4ae8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1195807447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe4661ce8d764c42a57538f780b686ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80a2f749-12", "ovs_interfaceid": "80a2f749-1281-4f8d-853e-5d5bca529aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1327.922766] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:3e:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b71230ae-e879-4384-88ce-fe64c86fce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '80a2f749-1281-4f8d-853e-5d5bca529aa3', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1327.935614] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Creating folder: Project (fe4661ce8d764c42a57538f780b686ed). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1327.939127] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-520bd004-910f-43d1-ae28-e07d09799472 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.955964] env[63297]: INFO nova.compute.manager [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Took 44.81 seconds to build instance. [ 1327.965126] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Created folder: Project (fe4661ce8d764c42a57538f780b686ed) in parent group-v353718. [ 1327.965340] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Creating folder: Instances. Parent ref: group-v353844. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1327.965631] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa7937d6-b236-48c5-adab-64b99fa31722 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.978434] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Created folder: Instances in parent group-v353844. [ 1327.980691] env[63297]: DEBUG oslo.service.loopingcall [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1327.982830] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1327.983548] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7963590-eb1f-4b3a-926f-a6c95ea1634a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.008167] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1328.008167] env[63297]: value = "task-1697226" [ 1328.008167] env[63297]: _type = "Task" [ 1328.008167] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.015615] env[63297]: DEBUG nova.compute.manager [req-83aa4e1a-787d-4a91-9fe0-c157da3a267f req-19bce011-c3e4-49b7-8c36-ce128f14b94b service nova] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Received event network-vif-plugged-2dd60b07-2f52-40a1-96a9-05d6dd307592 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1328.015843] env[63297]: DEBUG oslo_concurrency.lockutils [req-83aa4e1a-787d-4a91-9fe0-c157da3a267f req-19bce011-c3e4-49b7-8c36-ce128f14b94b service nova] Acquiring lock "5e158880-81a6-4d35-b1df-6fd59ba4a8ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.016058] env[63297]: DEBUG oslo_concurrency.lockutils [req-83aa4e1a-787d-4a91-9fe0-c157da3a267f req-19bce011-c3e4-49b7-8c36-ce128f14b94b service nova] Lock "5e158880-81a6-4d35-b1df-6fd59ba4a8ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.016235] env[63297]: DEBUG oslo_concurrency.lockutils [req-83aa4e1a-787d-4a91-9fe0-c157da3a267f req-19bce011-c3e4-49b7-8c36-ce128f14b94b service nova] Lock "5e158880-81a6-4d35-b1df-6fd59ba4a8ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.016485] env[63297]: DEBUG nova.compute.manager [req-83aa4e1a-787d-4a91-9fe0-c157da3a267f req-19bce011-c3e4-49b7-8c36-ce128f14b94b service nova] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] No waiting events found dispatching network-vif-plugged-2dd60b07-2f52-40a1-96a9-05d6dd307592 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1328.016606] env[63297]: WARNING nova.compute.manager [req-83aa4e1a-787d-4a91-9fe0-c157da3a267f req-19bce011-c3e4-49b7-8c36-ce128f14b94b service nova] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Received unexpected event network-vif-plugged-2dd60b07-2f52-40a1-96a9-05d6dd307592 for instance with vm_state building and task_state spawning. [ 1328.030161] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697226, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.121039] env[63297]: DEBUG nova.network.neutron [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Successfully updated port: 2dd60b07-2f52-40a1-96a9-05d6dd307592 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1328.140252] env[63297]: DEBUG nova.compute.manager [req-7c40d2c0-a9fa-4a14-945f-d20a839f2319 req-ede1e419-2091-4eab-a478-48b2bc086058 service nova] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Received event network-changed-80a2f749-1281-4f8d-853e-5d5bca529aa3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1328.140450] env[63297]: DEBUG nova.compute.manager [req-7c40d2c0-a9fa-4a14-945f-d20a839f2319 req-ede1e419-2091-4eab-a478-48b2bc086058 service nova] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Refreshing instance network info cache due to event network-changed-80a2f749-1281-4f8d-853e-5d5bca529aa3. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1328.140662] env[63297]: DEBUG oslo_concurrency.lockutils [req-7c40d2c0-a9fa-4a14-945f-d20a839f2319 req-ede1e419-2091-4eab-a478-48b2bc086058 service nova] Acquiring lock "refresh_cache-f429dd9b-be6c-4e90-876b-3a3931fb1c4a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.140804] env[63297]: DEBUG oslo_concurrency.lockutils [req-7c40d2c0-a9fa-4a14-945f-d20a839f2319 req-ede1e419-2091-4eab-a478-48b2bc086058 service nova] Acquired lock "refresh_cache-f429dd9b-be6c-4e90-876b-3a3931fb1c4a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.140965] env[63297]: DEBUG nova.network.neutron [req-7c40d2c0-a9fa-4a14-945f-d20a839f2319 req-ede1e419-2091-4eab-a478-48b2bc086058 service nova] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Refreshing network info cache for port 80a2f749-1281-4f8d-853e-5d5bca529aa3 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1328.150137] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697223, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515059} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.150583] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d15a7e98-755b-4c5c-ba34-dc5fc3f8846d/d15a7e98-755b-4c5c-ba34-dc5fc3f8846d.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1328.150960] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1328.151356] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c096c0c0-fc60-4c10-9fd3-61a73504cea9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.164438] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1328.164438] env[63297]: value = "task-1697227" [ 1328.164438] env[63297]: _type = "Task" [ 1328.164438] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.173187] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697227, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.189466] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.189712] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.190097] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5273e443-00b4-db6d-3311-73fcc7ff6f5b, 'name': SearchDatastore_Task, 'duration_secs': 0.010655} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.190209] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1328.191049] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 2d7b237e-f86d-42b1-ab04-320f0012a2d1/2d7b237e-f86d-42b1-ab04-320f0012a2d1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1328.191049] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7a4fdd1-72b8-4f63-9dba-e04b334a112b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.200032] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1328.200032] env[63297]: value = "task-1697228" [ 1328.200032] env[63297]: _type = "Task" [ 1328.200032] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.210425] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697228, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.420056] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f8de9b-a29d-439f-b5b0-6074aa3e5d77 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.429350] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c37dcb-7ac2-491d-a515-0b6b6b831a1a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.465280] env[63297]: DEBUG oslo_concurrency.lockutils [None req-92099b5d-b96a-42ac-9f16-79bd8adccef7 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "8fa5fef6-8768-4e24-aab3-db56a10588c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.342s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.470937] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a0fe87-224f-42b6-b6e4-866271887b99 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.479050] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c93c723-9023-44cd-a355-78312f31e2ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.497592] env[63297]: DEBUG nova.compute.provider_tree [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1328.517830] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697226, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.623876] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "refresh_cache-5e158880-81a6-4d35-b1df-6fd59ba4a8ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.624158] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired lock "refresh_cache-5e158880-81a6-4d35-b1df-6fd59ba4a8ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.624348] env[63297]: DEBUG nova.network.neutron [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1328.675793] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697227, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.15836} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.676106] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1328.676939] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd8f5ff-78f9-45dd-b41f-31b7f4459760 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.707150] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] d15a7e98-755b-4c5c-ba34-dc5fc3f8846d/d15a7e98-755b-4c5c-ba34-dc5fc3f8846d.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1328.707312] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01cabd93-c457-4ac6-8cf4-7dde1ec5fc42 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.733937] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697228, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503728} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.736400] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 2d7b237e-f86d-42b1-ab04-320f0012a2d1/2d7b237e-f86d-42b1-ab04-320f0012a2d1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1328.736624] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1328.737914] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1db520fc-48d2-49ce-9bb8-993aff1a822e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.740462] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1328.740462] env[63297]: value = "task-1697229" [ 1328.740462] env[63297]: _type = "Task" [ 1328.740462] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.744805] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1328.744805] env[63297]: value = "task-1697230" [ 1328.744805] env[63297]: _type = "Task" [ 1328.744805] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.751272] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697229, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.756794] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697230, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.942962] env[63297]: DEBUG nova.network.neutron [req-7c40d2c0-a9fa-4a14-945f-d20a839f2319 req-ede1e419-2091-4eab-a478-48b2bc086058 service nova] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Updated VIF entry in instance network info cache for port 80a2f749-1281-4f8d-853e-5d5bca529aa3. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1328.943424] env[63297]: DEBUG nova.network.neutron [req-7c40d2c0-a9fa-4a14-945f-d20a839f2319 req-ede1e419-2091-4eab-a478-48b2bc086058 service nova] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Updating instance_info_cache with network_info: [{"id": "80a2f749-1281-4f8d-853e-5d5bca529aa3", "address": "fa:16:3e:60:3e:20", "network": {"id": "a7957e8b-899a-4b5d-b0bb-59a0bd5a4ae8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1195807447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe4661ce8d764c42a57538f780b686ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80a2f749-12", "ovs_interfaceid": "80a2f749-1281-4f8d-853e-5d5bca529aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.970906] env[63297]: DEBUG nova.compute.manager [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1329.002488] env[63297]: DEBUG nova.scheduler.client.report [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1329.021112] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697226, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.170127] env[63297]: DEBUG nova.network.neutron [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1329.266393] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697230, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.24249} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.273313] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1329.274118] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697229, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.274694] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0174e1c8-1e6b-44f2-a240-ec3d3d096973 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.306256] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 2d7b237e-f86d-42b1-ab04-320f0012a2d1/2d7b237e-f86d-42b1-ab04-320f0012a2d1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1329.306759] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a3614dc-14f1-46f6-a103-8fb8084380f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.328017] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1329.328017] env[63297]: value = "task-1697231" [ 1329.328017] env[63297]: _type = "Task" [ 1329.328017] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.338566] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697231, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.359535] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "765f3232-f3f9-4d9b-92f2-fb6603f2a90a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.359761] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "765f3232-f3f9-4d9b-92f2-fb6603f2a90a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.449906] env[63297]: DEBUG oslo_concurrency.lockutils [req-7c40d2c0-a9fa-4a14-945f-d20a839f2319 req-ede1e419-2091-4eab-a478-48b2bc086058 service nova] Releasing lock "refresh_cache-f429dd9b-be6c-4e90-876b-3a3931fb1c4a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.450303] env[63297]: DEBUG nova.compute.manager [req-7c40d2c0-a9fa-4a14-945f-d20a839f2319 req-ede1e419-2091-4eab-a478-48b2bc086058 service nova] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Received event network-vif-deleted-ba10706f-1f6c-457c-8e5c-f18207b8577a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1329.486974] env[63297]: DEBUG nova.network.neutron [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Updating instance_info_cache with network_info: [{"id": "2dd60b07-2f52-40a1-96a9-05d6dd307592", "address": "fa:16:3e:e0:29:8e", "network": {"id": "a7957e8b-899a-4b5d-b0bb-59a0bd5a4ae8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1195807447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe4661ce8d764c42a57538f780b686ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dd60b07-2f", "ovs_interfaceid": "2dd60b07-2f52-40a1-96a9-05d6dd307592", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.507293] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.512622] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.682s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.513174] env[63297]: DEBUG nova.compute.manager [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1329.517889] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.816s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.517889] env[63297]: DEBUG nova.objects.instance [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lazy-loading 'resources' on Instance uuid eff06e8a-8341-4d5e-b6dd-a585be4a21ea {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1329.530679] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697226, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.700048] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "63785911-ea55-4aeb-9ba2-6cea5ddd9cae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.700278] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "63785911-ea55-4aeb-9ba2-6cea5ddd9cae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.752241] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697229, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.838744] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697231, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.989819] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Releasing lock "refresh_cache-5e158880-81a6-4d35-b1df-6fd59ba4a8ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.991145] env[63297]: DEBUG nova.compute.manager [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Instance network_info: |[{"id": "2dd60b07-2f52-40a1-96a9-05d6dd307592", "address": "fa:16:3e:e0:29:8e", "network": {"id": "a7957e8b-899a-4b5d-b0bb-59a0bd5a4ae8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1195807447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe4661ce8d764c42a57538f780b686ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dd60b07-2f", "ovs_interfaceid": "2dd60b07-2f52-40a1-96a9-05d6dd307592", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1329.991145] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:29:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b71230ae-e879-4384-88ce-fe64c86fce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2dd60b07-2f52-40a1-96a9-05d6dd307592', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1329.998976] env[63297]: DEBUG oslo.service.loopingcall [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1329.999329] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1329.999705] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90fe8f99-8ee6-479c-b454-fd6e026ab1ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.020689] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1330.020689] env[63297]: value = "task-1697232" [ 1330.020689] env[63297]: _type = "Task" [ 1330.020689] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.028041] env[63297]: DEBUG nova.compute.utils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1330.032394] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697226, 'name': CreateVM_Task, 'duration_secs': 1.533096} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.033152] env[63297]: DEBUG nova.compute.manager [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1330.033152] env[63297]: DEBUG nova.network.neutron [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1330.035228] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1330.035868] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.036072] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.036409] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired external semaphore "[datastore1] 
devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1330.038798] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-617133da-dbcd-4fe6-bdd2-9aab45bf5336 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.044691] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697232, 'name': CreateVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.049973] env[63297]: DEBUG nova.compute.manager [req-64ed1498-27af-4c76-a1fa-c3e423d2267a req-94df7691-ee23-4900-8540-521bda4ae2f6 service nova] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Received event network-changed-2dd60b07-2f52-40a1-96a9-05d6dd307592 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1330.049973] env[63297]: DEBUG nova.compute.manager [req-64ed1498-27af-4c76-a1fa-c3e423d2267a req-94df7691-ee23-4900-8540-521bda4ae2f6 service nova] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Refreshing instance network info cache due to event network-changed-2dd60b07-2f52-40a1-96a9-05d6dd307592. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1330.049973] env[63297]: DEBUG oslo_concurrency.lockutils [req-64ed1498-27af-4c76-a1fa-c3e423d2267a req-94df7691-ee23-4900-8540-521bda4ae2f6 service nova] Acquiring lock "refresh_cache-5e158880-81a6-4d35-b1df-6fd59ba4a8ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.049973] env[63297]: DEBUG oslo_concurrency.lockutils [req-64ed1498-27af-4c76-a1fa-c3e423d2267a req-94df7691-ee23-4900-8540-521bda4ae2f6 service nova] Acquired lock "refresh_cache-5e158880-81a6-4d35-b1df-6fd59ba4a8ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.049973] env[63297]: DEBUG nova.network.neutron [req-64ed1498-27af-4c76-a1fa-c3e423d2267a req-94df7691-ee23-4900-8540-521bda4ae2f6 service nova] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Refreshing network info cache for port 2dd60b07-2f52-40a1-96a9-05d6dd307592 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1330.052731] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1330.052731] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b552dd-41cd-ca44-f506-3628bdb39b91" [ 1330.052731] env[63297]: _type = "Task" [ 1330.052731] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.063720] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b552dd-41cd-ca44-f506-3628bdb39b91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.087927] env[63297]: DEBUG nova.policy [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '740582c6cd1d4a99ab17d64d3dfffb76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe4661ce8d764c42a57538f780b686ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1330.254567] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697229, 'name': ReconfigVM_Task, 'duration_secs': 1.03954} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.254853] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Reconfigured VM instance instance-00000028 to attach disk [datastore1] d15a7e98-755b-4c5c-ba34-dc5fc3f8846d/d15a7e98-755b-4c5c-ba34-dc5fc3f8846d.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1330.255473] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-453e0179-4fb4-4618-b38c-cac97735be54 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.261996] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1330.261996] env[63297]: value = "task-1697233" [ 1330.261996] env[63297]: _type = "Task" [ 1330.261996] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.271625] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697233, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.340158] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697231, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.433108] env[63297]: DEBUG nova.network.neutron [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Successfully created port: 565ddc43-5913-4f71-b8a3-0186c54805d3 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1330.533014] env[63297]: DEBUG nova.compute.manager [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1330.535663] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697232, 'name': CreateVM_Task, 'duration_secs': 0.404687} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.538153] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1330.540293] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.562373] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e854cb02-507d-438e-aab9-6460364cd658 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.568697] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b552dd-41cd-ca44-f506-3628bdb39b91, 'name': SearchDatastore_Task, 'duration_secs': 0.012954} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.569357] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.569591] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1330.569815] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.570074] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.570376] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.570673] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.570975] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1330.571219] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6c92c27-f2d6-4a2a-b354-96a60c93eeaa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.574702] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-181a49cf-ab85-4eff-9d63-27659e7b7d6f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.578022] 
env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19579c3-9ff6-4445-be51-1b2ba45fe506 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.583634] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1330.583822] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1330.585838] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-124e2f83-6f7c-4e43-8588-bf5f85139018 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.589042] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1330.589042] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52edbe33-2ac2-7d1e-c146-d3baf17ee367" [ 1330.589042] env[63297]: _type = "Task" [ 1330.589042] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.619823] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85f9ff9-b85a-4da4-908a-918f5c39b0fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.625384] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1330.625384] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52abd43d-c5ad-1a76-ecb2-58f13c3bf159" [ 1330.625384] env[63297]: _type = "Task" [ 1330.625384] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.633915] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52edbe33-2ac2-7d1e-c146-d3baf17ee367, 'name': SearchDatastore_Task, 'duration_secs': 0.008248} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.634692] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.634920] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1330.635167] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.636478] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be18aa1-d61d-4cc5-932f-7b9f4754e691 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.643669] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52abd43d-c5ad-1a76-ecb2-58f13c3bf159, 'name': SearchDatastore_Task, 'duration_secs': 0.009508} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.644722] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36eb2160-6bf4-436a-a3e2-567c71213b36 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.656159] env[63297]: DEBUG nova.compute.provider_tree [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1330.660258] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1330.660258] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5230dda1-7759-88ba-89e9-927045bf3aa4" [ 1330.660258] env[63297]: _type = "Task" [ 1330.660258] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.668009] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5230dda1-7759-88ba-89e9-927045bf3aa4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.773890] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697233, 'name': Rename_Task, 'duration_secs': 0.21649} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.776513] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1330.777707] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d504542c-0bb4-4863-ac5a-ade764dc5cff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.783132] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1330.783132] env[63297]: value = "task-1697234" [ 1330.783132] env[63297]: _type = "Task" [ 1330.783132] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.791541] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697234, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.843635] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697231, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.906312] env[63297]: DEBUG nova.network.neutron [req-64ed1498-27af-4c76-a1fa-c3e423d2267a req-94df7691-ee23-4900-8540-521bda4ae2f6 service nova] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Updated VIF entry in instance network info cache for port 2dd60b07-2f52-40a1-96a9-05d6dd307592. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1330.906703] env[63297]: DEBUG nova.network.neutron [req-64ed1498-27af-4c76-a1fa-c3e423d2267a req-94df7691-ee23-4900-8540-521bda4ae2f6 service nova] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Updating instance_info_cache with network_info: [{"id": "2dd60b07-2f52-40a1-96a9-05d6dd307592", "address": "fa:16:3e:e0:29:8e", "network": {"id": "a7957e8b-899a-4b5d-b0bb-59a0bd5a4ae8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1195807447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe4661ce8d764c42a57538f780b686ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dd60b07-2f", "ovs_interfaceid": "2dd60b07-2f52-40a1-96a9-05d6dd307592", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.162031] env[63297]: DEBUG nova.scheduler.client.report [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1331.175021] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5230dda1-7759-88ba-89e9-927045bf3aa4, 'name': SearchDatastore_Task, 'duration_secs': 0.009048} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.176022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.176022] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] f429dd9b-be6c-4e90-876b-3a3931fb1c4a/f429dd9b-be6c-4e90-876b-3a3931fb1c4a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1331.176763] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.177124] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1331.177473] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-651b4dc2-c042-4ef4-b5e9-0601b03d19fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.179594] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-072188be-a3f4-409a-b2b3-809f18260a51 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.189225] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1331.189225] env[63297]: value = "task-1697235" [ 1331.189225] env[63297]: _type = "Task" [ 1331.189225] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.191948] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1331.192466] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1331.193817] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b08162bd-0b60-4222-b6c9-cd7000bdc671 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.199535] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697235, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.203663] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1331.203663] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522abd93-4fed-0933-34b0-b6c4e1cce3cb" [ 1331.203663] env[63297]: _type = "Task" [ 1331.203663] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.211502] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522abd93-4fed-0933-34b0-b6c4e1cce3cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.298542] env[63297]: DEBUG oslo_vmware.api [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697234, 'name': PowerOnVM_Task, 'duration_secs': 0.500565} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.299481] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1331.299990] env[63297]: INFO nova.compute.manager [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Took 10.03 seconds to spawn the instance on the hypervisor. 
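(Annotation: the task-1697235 / task-1697236 entries above and below show oslo.vmware's asynchronous task pattern as used by the Nova VMware driver — a vSphere call such as CopyVirtualDisk_Task or PowerOnVM_Task returns a Task reference, and wait_for_task then polls it, producing the "Waiting for the task ... progress is N% ... completed successfully" lines via _poll_task. A minimal illustrative sketch of that pattern follows; the host, credentials, retry/poll values, the VM moref 'vm-123' and the PowerOnVM_Task invocation are placeholders for illustration and are not taken from this log.)

    # Sketch only: submit a vCenter task via oslo.vmware and block until it finishes.
    from oslo_vmware import api, vim_util

    # Placeholder connection details; in Nova these come from the [vmware] config section.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed object reference for some existing VM (value is hypothetical).
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    # Asynchronous vSphere call: returns a Task moref immediately.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Polls the task (the _poll_task lines above) and raises on task error.
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)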
[ 1331.301915] env[63297]: DEBUG nova.compute.manager [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1331.303429] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1c76fa-d174-4540-b23a-85b944a0792f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.343512] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697231, 'name': ReconfigVM_Task, 'duration_secs': 1.673756} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.343811] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 2d7b237e-f86d-42b1-ab04-320f0012a2d1/2d7b237e-f86d-42b1-ab04-320f0012a2d1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1331.344569] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1034d24-2ca2-4ce5-82cd-639fa134156b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.350991] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1331.350991] env[63297]: value = "task-1697236" [ 1331.350991] env[63297]: _type = "Task" [ 1331.350991] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.360920] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697236, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.409892] env[63297]: DEBUG oslo_concurrency.lockutils [req-64ed1498-27af-4c76-a1fa-c3e423d2267a req-94df7691-ee23-4900-8540-521bda4ae2f6 service nova] Releasing lock "refresh_cache-5e158880-81a6-4d35-b1df-6fd59ba4a8ff" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.547527] env[63297]: DEBUG nova.compute.manager [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1331.580652] env[63297]: DEBUG nova.virt.hardware [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1331.580926] env[63297]: DEBUG nova.virt.hardware [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1331.581228] env[63297]: DEBUG nova.virt.hardware [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1331.581441] env[63297]: DEBUG nova.virt.hardware [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1331.581591] env[63297]: DEBUG nova.virt.hardware [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1331.581742] env[63297]: DEBUG nova.virt.hardware [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1331.581958] env[63297]: DEBUG nova.virt.hardware [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1331.582176] env[63297]: DEBUG nova.virt.hardware [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1331.582340] 
env[63297]: DEBUG nova.virt.hardware [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1331.582512] env[63297]: DEBUG nova.virt.hardware [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1331.582704] env[63297]: DEBUG nova.virt.hardware [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1331.583725] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd0cdd3-ebcb-4c70-9131-e67f463224ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.596692] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ca0e38-50c3-4ab7-96ed-91576e9b934a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.669530] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.153s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.672727] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.202s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.676934] env[63297]: INFO nova.compute.claims [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1331.698811] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697235, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.699866] env[63297]: INFO nova.scheduler.client.report [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Deleted allocations for instance eff06e8a-8341-4d5e-b6dd-a585be4a21ea [ 1331.720172] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522abd93-4fed-0933-34b0-b6c4e1cce3cb, 'name': SearchDatastore_Task, 'duration_secs': 0.019708} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.722116] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1080f7e-fd02-4a80-9d5b-2e9752c4929a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.729122] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1331.729122] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ca5abc-136b-fe45-ac43-9da4f8d4724f" [ 1331.729122] env[63297]: _type = "Task" [ 1331.729122] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.739253] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ca5abc-136b-fe45-ac43-9da4f8d4724f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.824304] env[63297]: INFO nova.compute.manager [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Took 43.29 seconds to build instance. [ 1331.862721] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697236, 'name': Rename_Task, 'duration_secs': 0.204583} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.862987] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1331.863334] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-916cd45d-ba1d-4c5b-9905-8526ac8321af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.871785] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1331.871785] env[63297]: value = "task-1697237" [ 1331.871785] env[63297]: _type = "Task" [ 1331.871785] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.877662] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697237, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.181354] env[63297]: DEBUG nova.network.neutron [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Successfully updated port: 565ddc43-5913-4f71-b8a3-0186c54805d3 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1332.189816] env[63297]: DEBUG nova.compute.manager [req-30ee2de1-f933-446e-86e0-f585dfe77098 req-9c902529-f617-442c-806b-487984b504ae service nova] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Received event network-vif-plugged-565ddc43-5913-4f71-b8a3-0186c54805d3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1332.190048] env[63297]: DEBUG oslo_concurrency.lockutils [req-30ee2de1-f933-446e-86e0-f585dfe77098 req-9c902529-f617-442c-806b-487984b504ae service nova] Acquiring lock "99cc8af3-5c18-4839-94db-996861e0c276-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.190281] env[63297]: DEBUG oslo_concurrency.lockutils [req-30ee2de1-f933-446e-86e0-f585dfe77098 req-9c902529-f617-442c-806b-487984b504ae service nova] Lock "99cc8af3-5c18-4839-94db-996861e0c276-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.190457] env[63297]: DEBUG oslo_concurrency.lockutils [req-30ee2de1-f933-446e-86e0-f585dfe77098 req-9c902529-f617-442c-806b-487984b504ae service nova] Lock "99cc8af3-5c18-4839-94db-996861e0c276-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.190627] env[63297]: DEBUG 
nova.compute.manager [req-30ee2de1-f933-446e-86e0-f585dfe77098 req-9c902529-f617-442c-806b-487984b504ae service nova] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] No waiting events found dispatching network-vif-plugged-565ddc43-5913-4f71-b8a3-0186c54805d3 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1332.190783] env[63297]: WARNING nova.compute.manager [req-30ee2de1-f933-446e-86e0-f585dfe77098 req-9c902529-f617-442c-806b-487984b504ae service nova] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Received unexpected event network-vif-plugged-565ddc43-5913-4f71-b8a3-0186c54805d3 for instance with vm_state building and task_state spawning. [ 1332.203043] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697235, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522878} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.203450] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] f429dd9b-be6c-4e90-876b-3a3931fb1c4a/f429dd9b-be6c-4e90-876b-3a3931fb1c4a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1332.203561] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1332.204325] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8fadcc78-fd36-4989-8321-0ecf93ed034a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.212134] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1332.212134] env[63297]: value = "task-1697238" [ 1332.212134] env[63297]: _type = "Task" [ 1332.212134] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.221033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7aebafc2-90a4-4b66-b27d-740247095a91 tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "eff06e8a-8341-4d5e-b6dd-a585be4a21ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.550s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.227054] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697238, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.241541] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ca5abc-136b-fe45-ac43-9da4f8d4724f, 'name': SearchDatastore_Task, 'duration_secs': 0.008499} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.241541] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1332.241541] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5e158880-81a6-4d35-b1df-6fd59ba4a8ff/5e158880-81a6-4d35-b1df-6fd59ba4a8ff.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1332.241541] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-990efc4a-6fec-4e15-b8f1-c57f248d01e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.252093] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1332.252093] env[63297]: value = "task-1697239" [ 1332.252093] env[63297]: _type = "Task" [ 1332.252093] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.262369] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697239, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.328525] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dccec896-e709-46eb-a72a-f5a1b017738b tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "d15a7e98-755b-4c5c-ba34-dc5fc3f8846d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.532s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.380818] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697237, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.626280] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "4438e230-0589-48ae-8848-d1f8414efa61" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.626280] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "4438e230-0589-48ae-8848-d1f8414efa61" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.626771] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "4438e230-0589-48ae-8848-d1f8414efa61-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.627165] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "4438e230-0589-48ae-8848-d1f8414efa61-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.627291] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "4438e230-0589-48ae-8848-d1f8414efa61-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.630308] env[63297]: INFO nova.compute.manager [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Terminating instance [ 1332.633392] env[63297]: DEBUG nova.compute.manager [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1332.633392] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1332.635477] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9e79de-852e-499e-80dc-18595a4d845f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.645742] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1332.646215] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb98cef8-da26-4894-94e9-d5ca4fa38c07 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.653610] env[63297]: DEBUG oslo_vmware.api [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1332.653610] env[63297]: value = "task-1697240" [ 1332.653610] env[63297]: _type = "Task" [ 1332.653610] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.664600] env[63297]: DEBUG oslo_vmware.api [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697240, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.692597] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "refresh_cache-99cc8af3-5c18-4839-94db-996861e0c276" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1332.692832] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired lock "refresh_cache-99cc8af3-5c18-4839-94db-996861e0c276" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1332.692902] env[63297]: DEBUG nova.network.neutron [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1332.726741] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697238, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068838} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.726836] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1332.727853] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bd7e6e-565d-4448-92a4-594208ce0087 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.754163] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] f429dd9b-be6c-4e90-876b-3a3931fb1c4a/f429dd9b-be6c-4e90-876b-3a3931fb1c4a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1332.757566] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-875373a6-56a3-4c94-bba2-3a46d47b25d2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.788868] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697239, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484982} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.790893] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5e158880-81a6-4d35-b1df-6fd59ba4a8ff/5e158880-81a6-4d35-b1df-6fd59ba4a8ff.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1332.790893] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1332.791514] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1332.791514] env[63297]: value = "task-1697241" [ 1332.791514] env[63297]: _type = "Task" [ 1332.791514] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.793138] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4aafa3d-0275-4e1a-94b2-6e52809c9923 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.807402] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697241, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.808884] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1332.808884] env[63297]: value = "task-1697242" [ 1332.808884] env[63297]: _type = "Task" [ 1332.808884] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.822499] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697242, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.830809] env[63297]: DEBUG nova.compute.manager [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1332.881927] env[63297]: DEBUG oslo_vmware.api [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697237, 'name': PowerOnVM_Task, 'duration_secs': 0.764529} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.884817] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1332.885064] env[63297]: INFO nova.compute.manager [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Took 17.19 seconds to spawn the instance on the hypervisor. [ 1332.885307] env[63297]: DEBUG nova.compute.manager [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1332.886869] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be53c7c1-8461-45b9-97dc-c03c39bd95fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.173566] env[63297]: DEBUG oslo_vmware.api [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697240, 'name': PowerOffVM_Task, 'duration_secs': 0.496166} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.173843] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1333.174052] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1333.174287] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95b0001f-11ef-43c3-9bdd-72abae113394 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.238907] env[63297]: DEBUG nova.network.neutron [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1333.271523] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1333.271801] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1333.272031] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Deleting the datastore file [datastore1] 4438e230-0589-48ae-8848-d1f8414efa61 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1333.272901] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a915a75-d635-43b1-be0c-ef95889839fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.282897] env[63297]: DEBUG oslo_vmware.api [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for the task: (returnval){ [ 1333.282897] env[63297]: value = "task-1697244" [ 1333.282897] env[63297]: _type = "Task" [ 1333.282897] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.294943] env[63297]: DEBUG oslo_vmware.api [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697244, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.309386] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697241, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.322274] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697242, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.363763] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.391365] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab122d7d-7a15-435e-8245-f530a1e76821 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.405818] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163fd81a-896f-484f-b80e-d0c70d9dc10c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.412377] env[63297]: INFO nova.compute.manager [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Took 51.88 seconds to build instance. [ 1333.460871] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa9d97f-b3fa-4308-b4c1-d1dd3d1c7006 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.468018] env[63297]: DEBUG nova.network.neutron [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Updating instance_info_cache with network_info: [{"id": "565ddc43-5913-4f71-b8a3-0186c54805d3", "address": "fa:16:3e:39:39:40", "network": {"id": "a7957e8b-899a-4b5d-b0bb-59a0bd5a4ae8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1195807447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe4661ce8d764c42a57538f780b686ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap565ddc43-59", "ovs_interfaceid": "565ddc43-5913-4f71-b8a3-0186c54805d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.472561] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9784d02-ce3f-461c-adab-c27ecfb34c53 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.492315] env[63297]: DEBUG nova.compute.provider_tree [None 
req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1333.794256] env[63297]: DEBUG oslo_vmware.api [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Task: {'id': task-1697244, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145138} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.794691] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1333.795251] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1333.796159] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1333.796436] env[63297]: INFO nova.compute.manager [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1333.796750] env[63297]: DEBUG oslo.service.loopingcall [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1333.797111] env[63297]: DEBUG nova.compute.manager [-] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1333.797267] env[63297]: DEBUG nova.network.neutron [-] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1333.808476] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697241, 'name': ReconfigVM_Task, 'duration_secs': 0.8805} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.808827] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Reconfigured VM instance instance-00000029 to attach disk [datastore1] f429dd9b-be6c-4e90-876b-3a3931fb1c4a/f429dd9b-be6c-4e90-876b-3a3931fb1c4a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1333.809559] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8e665559-b69e-4642-a4ea-ba4adfb921e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.823356] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1333.823356] env[63297]: value = "task-1697245" [ 1333.823356] env[63297]: _type = "Task" [ 1333.823356] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.826182] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697242, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.539523} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.826575] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1333.830501] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8f1b08-9291-490c-8714-6a84f9e8b1fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.837881] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697245, 'name': Rename_Task} progress is 10%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.856676] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 5e158880-81a6-4d35-b1df-6fd59ba4a8ff/5e158880-81a6-4d35-b1df-6fd59ba4a8ff.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1333.857156] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ed9bfde-e4e5-4b22-a8be-9c3a72076ed1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.877615] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1333.877615] env[63297]: value = "task-1697246" [ 1333.877615] env[63297]: _type = "Task" [ 1333.877615] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.887034] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697246, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.920854] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4941725e-123a-4ca0-b3e9-e08ff2d36d4f tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.237s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.960488] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.960760] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.961084] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.961382] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.961566] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.964129] env[63297]: INFO nova.compute.manager [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Terminating instance [ 1333.966295] env[63297]: DEBUG nova.compute.manager [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1333.967375] env[63297]: DEBUG nova.compute.manager [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1333.967625] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1333.968074] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Releasing lock "refresh_cache-99cc8af3-5c18-4839-94db-996861e0c276" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1333.968786] env[63297]: DEBUG nova.compute.manager [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Instance network_info: |[{"id": "565ddc43-5913-4f71-b8a3-0186c54805d3", "address": "fa:16:3e:39:39:40", "network": {"id": "a7957e8b-899a-4b5d-b0bb-59a0bd5a4ae8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1195807447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe4661ce8d764c42a57538f780b686ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap565ddc43-59", "ovs_interfaceid": "565ddc43-5913-4f71-b8a3-0186c54805d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1333.970351] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33713842-222e-4f00-9134-c01fc50af504 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.974175] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede95c44-e9d6-48b5-b976-bff39011f961 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.977105] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:39:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b71230ae-e879-4384-88ce-fe64c86fce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '565ddc43-5913-4f71-b8a3-0186c54805d3', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1333.985252] env[63297]: DEBUG oslo.service.loopingcall [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1333.986352] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1333.987687] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-236f26d9-b876-4ba2-a1a8-dfeea0cd5871 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.011877] env[63297]: DEBUG nova.scheduler.client.report [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1334.020361] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1334.020471] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a97fb8d5-f5fb-402f-b111-aed419508e09 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.024262] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1334.024262] env[63297]: value = "task-1697247" [ 1334.024262] env[63297]: _type = "Task" [ 1334.024262] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.029646] env[63297]: DEBUG oslo_vmware.api [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1334.029646] env[63297]: value = "task-1697248" [ 1334.029646] env[63297]: _type = "Task" [ 1334.029646] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.039113] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697247, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.047903] env[63297]: DEBUG oslo_vmware.api [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697248, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.243799] env[63297]: DEBUG nova.compute.manager [req-699a61c3-cfe5-493b-80a8-eb212c388b73 req-6f200ea6-9681-48e7-997b-d03480658773 service nova] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Received event network-changed-565ddc43-5913-4f71-b8a3-0186c54805d3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1334.244009] env[63297]: DEBUG nova.compute.manager [req-699a61c3-cfe5-493b-80a8-eb212c388b73 req-6f200ea6-9681-48e7-997b-d03480658773 service nova] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Refreshing instance network info cache due to event network-changed-565ddc43-5913-4f71-b8a3-0186c54805d3. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1334.244526] env[63297]: DEBUG oslo_concurrency.lockutils [req-699a61c3-cfe5-493b-80a8-eb212c388b73 req-6f200ea6-9681-48e7-997b-d03480658773 service nova] Acquiring lock "refresh_cache-99cc8af3-5c18-4839-94db-996861e0c276" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.244688] env[63297]: DEBUG oslo_concurrency.lockutils [req-699a61c3-cfe5-493b-80a8-eb212c388b73 req-6f200ea6-9681-48e7-997b-d03480658773 service nova] Acquired lock "refresh_cache-99cc8af3-5c18-4839-94db-996861e0c276" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.244858] env[63297]: DEBUG nova.network.neutron [req-699a61c3-cfe5-493b-80a8-eb212c388b73 req-6f200ea6-9681-48e7-997b-d03480658773 service nova] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Refreshing network info cache for port 565ddc43-5913-4f71-b8a3-0186c54805d3 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1334.337324] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697245, 'name': Rename_Task, 'duration_secs': 0.309039} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.337780] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1334.338058] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-192eee17-3989-4658-83d8-89a4504661c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.344805] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1334.344805] env[63297]: value = "task-1697249" [ 1334.344805] env[63297]: _type = "Task" [ 1334.344805] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.353611] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697249, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.391157] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697246, 'name': ReconfigVM_Task, 'duration_secs': 0.430162} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.391465] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 5e158880-81a6-4d35-b1df-6fd59ba4a8ff/5e158880-81a6-4d35-b1df-6fd59ba4a8ff.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1334.392213] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2dfdd69b-2f1d-4d8f-9c98-9aba678f7e29 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.399118] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1334.399118] env[63297]: value = "task-1697250" [ 1334.399118] env[63297]: _type = "Task" [ 1334.399118] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.410812] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697250, 'name': Rename_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.423596] env[63297]: DEBUG nova.compute.manager [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1334.521111] env[63297]: INFO nova.compute.manager [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] instance snapshotting [ 1334.523811] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.851s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.523811] env[63297]: DEBUG nova.compute.manager [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1334.526761] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 28.544s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.527046] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.527212] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1334.527485] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.388s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.527669] env[63297]: DEBUG nova.objects.instance [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lazy-loading 'resources' on Instance uuid 581f9d48-dcb8-4a34-928b-64087a9f966b {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1334.532191] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c128b7-6f6a-49f9-8e9f-acdf3b6edb30 
{{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.535204] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bddee64-8ee3-4c0a-91cc-dc4a411a43c7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.573578] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103ea916-cc75-4bf9-b7c5-7b7849cd3e21 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.579357] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697247, 'name': CreateVM_Task, 'duration_secs': 0.476529} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.581637] env[63297]: DEBUG oslo_vmware.api [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697248, 'name': PowerOffVM_Task, 'duration_secs': 0.284491} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.581637] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1334.581637] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1334.581791] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1334.582548] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6abe3c-8c1d-4d2d-95e6-042c04c04b06 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.586080] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.586255] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.586610] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired external 
semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1334.587150] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-455bb5fa-630f-47ec-adb3-79a8e1db6f96 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.598259] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fba86923-d511-44d4-ab45-69f5e360fb4f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.602388] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fbf0b36-a0af-42e4-888d-d9c1c717633f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.615139] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1334.615139] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520f4359-2096-69b5-492e-87b955e0e51a" [ 1334.615139] env[63297]: _type = "Task" [ 1334.615139] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.620638] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5501b46e-6523-4112-983a-34793d13465f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.631179] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520f4359-2096-69b5-492e-87b955e0e51a, 'name': SearchDatastore_Task, 'duration_secs': 0.00973} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.631179] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1334.631179] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1334.631179] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.631179] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.631179] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1334.631179] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78dde043-fe44-4ad7-aa4e-0b8284d9b7c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.658039] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179129MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1334.658216] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.666021] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1334.666213] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None 
req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1334.667057] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f622abd-e009-404c-a470-b3b851d5f0ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.672633] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1334.672633] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520b5bbf-7aa4-8c28-ef5c-fda93e85e933" [ 1334.672633] env[63297]: _type = "Task" [ 1334.672633] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.681554] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520b5bbf-7aa4-8c28-ef5c-fda93e85e933, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.724157] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1334.724388] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1334.724566] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Deleting the datastore file [datastore1] 2d7b237e-f86d-42b1-ab04-320f0012a2d1 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1334.724900] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1b9665b-480a-49e6-a64e-cda5b14644b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.731750] env[63297]: DEBUG oslo_vmware.api [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for the task: (returnval){ [ 1334.731750] env[63297]: value = "task-1697252" [ 1334.731750] env[63297]: _type = "Task" [ 1334.731750] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.743023] env[63297]: DEBUG oslo_vmware.api [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697252, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.859989] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697249, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.911439] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697250, 'name': Rename_Task, 'duration_secs': 0.169567} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.911439] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1334.911439] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1269d68-79ea-4586-8118-69e823ef368d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.917761] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1334.917761] env[63297]: value = "task-1697253" [ 1334.917761] env[63297]: _type = "Task" [ 1334.917761] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.926526] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697253, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.967464] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.036819] env[63297]: DEBUG nova.compute.utils [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1335.041167] env[63297]: DEBUG nova.compute.manager [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1335.041347] env[63297]: DEBUG nova.network.neutron [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1335.043218] env[63297]: DEBUG nova.network.neutron [-] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.091962] env[63297]: DEBUG nova.policy [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '562cad08586e45328f452cb57ebe6d23', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f55b91c7fce9495aa8060f3f02ff7bda', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1335.114853] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1335.115255] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-14492ea1-e9fb-4485-8091-e9e41341b5a2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.125748] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1335.125748] env[63297]: value = "task-1697254" [ 1335.125748] 
env[63297]: _type = "Task" [ 1335.125748] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.160020] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697254, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.186551] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520b5bbf-7aa4-8c28-ef5c-fda93e85e933, 'name': SearchDatastore_Task, 'duration_secs': 0.008555} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.187766] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ae03c9a-679a-4471-9ec0-b16d899b766c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.195707] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1335.195707] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f8890e-dfb5-3fdd-f03f-ff1d299af2fb" [ 1335.195707] env[63297]: _type = "Task" [ 1335.195707] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.205123] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f8890e-dfb5-3fdd-f03f-ff1d299af2fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.231411] env[63297]: DEBUG nova.network.neutron [req-699a61c3-cfe5-493b-80a8-eb212c388b73 req-6f200ea6-9681-48e7-997b-d03480658773 service nova] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Updated VIF entry in instance network info cache for port 565ddc43-5913-4f71-b8a3-0186c54805d3. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1335.231791] env[63297]: DEBUG nova.network.neutron [req-699a61c3-cfe5-493b-80a8-eb212c388b73 req-6f200ea6-9681-48e7-997b-d03480658773 service nova] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Updating instance_info_cache with network_info: [{"id": "565ddc43-5913-4f71-b8a3-0186c54805d3", "address": "fa:16:3e:39:39:40", "network": {"id": "a7957e8b-899a-4b5d-b0bb-59a0bd5a4ae8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1195807447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe4661ce8d764c42a57538f780b686ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap565ddc43-59", "ovs_interfaceid": "565ddc43-5913-4f71-b8a3-0186c54805d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.245877] env[63297]: DEBUG oslo_vmware.api [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Task: {'id': task-1697252, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178437} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.246779] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1335.247027] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1335.247289] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1335.247471] env[63297]: INFO nova.compute.manager [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Took 1.28 seconds to destroy the instance on the hypervisor. 
[ 1335.247781] env[63297]: DEBUG oslo.service.loopingcall [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1335.248723] env[63297]: DEBUG nova.compute.manager [-] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1335.248723] env[63297]: DEBUG nova.network.neutron [-] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1335.356188] env[63297]: DEBUG oslo_vmware.api [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697249, 'name': PowerOnVM_Task, 'duration_secs': 0.535102} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.358871] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1335.362018] env[63297]: INFO nova.compute.manager [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Took 11.43 seconds to spawn the instance on the hypervisor. [ 1335.362018] env[63297]: DEBUG nova.compute.manager [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1335.362018] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30e87a2-d92e-44d1-add9-c3f82fe8f7bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.434415] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697253, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.508399] env[63297]: DEBUG nova.network.neutron [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Successfully created port: faebe641-fe2b-4fc2-828b-4348d47ab8eb {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1335.541928] env[63297]: DEBUG nova.compute.manager [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1335.549035] env[63297]: INFO nova.compute.manager [-] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Took 1.75 seconds to deallocate network for instance. [ 1335.651921] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697254, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.660256] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bda44af-794e-41f6-ac40-6f9b5cc78c5e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.669570] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebd5a52-e3a5-42af-8a39-6e73de633ba5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.707268] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b913fce2-e280-4813-826f-df70304fc814 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.715383] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f8890e-dfb5-3fdd-f03f-ff1d299af2fb, 'name': SearchDatastore_Task, 'duration_secs': 0.01205} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.717549] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.718015] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 99cc8af3-5c18-4839-94db-996861e0c276/99cc8af3-5c18-4839-94db-996861e0c276.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1335.718328] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9e08bd0-940d-4579-b835-353ffb2f2c82 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.721128] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7ac7a8-f41a-48e0-ad8e-6c17771d463a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.734731] env[63297]: DEBUG nova.compute.provider_tree [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1335.737525] env[63297]: DEBUG oslo_concurrency.lockutils [req-699a61c3-cfe5-493b-80a8-eb212c388b73 req-6f200ea6-9681-48e7-997b-d03480658773 service nova] Releasing lock "refresh_cache-99cc8af3-5c18-4839-94db-996861e0c276" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.737982] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1335.737982] env[63297]: value = "task-1697255" [ 1335.737982] env[63297]: _type = "Task" [ 1335.737982] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.746656] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697255, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.879629] env[63297]: INFO nova.compute.manager [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Took 43.87 seconds to build instance. 
[ 1335.931768] env[63297]: DEBUG oslo_vmware.api [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697253, 'name': PowerOnVM_Task, 'duration_secs': 0.732633} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.933806] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1335.933913] env[63297]: INFO nova.compute.manager [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Took 9.27 seconds to spawn the instance on the hypervisor. [ 1335.934465] env[63297]: DEBUG nova.compute.manager [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1335.935366] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc158219-66f0-4d06-a294-ef5b991d95f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.053666] env[63297]: INFO nova.virt.block_device [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Booting with volume 1cf3188d-0b66-4933-9595-057e902e5d2b at /dev/sda [ 1336.073518] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.109368] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d896904a-31d2-425d-92e2-0ff4878079d9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.120039] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1718a97d-f44a-4289-8b98-93a52b256f2d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.154146] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-167006c4-eae6-4b28-95a4-940db9c91a10 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.160168] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697254, 'name': CreateSnapshot_Task, 'duration_secs': 0.79606} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.160923] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1336.161792] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee31a1b1-bd11-4623-923e-c13fb05c2317 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.168804] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f3e3e5-0c80-4160-8637-be4e9eb5fd58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.212513] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faac9c20-07e3-4726-9e34-39b5236d4132 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.220628] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fec0d8-514c-4629-a841-a0a1df185030 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.235575] env[63297]: DEBUG nova.virt.block_device [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Updating existing volume attachment record: 156259cc-adf4-4941-b08c-450fb512d4b6 {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1336.240130] env[63297]: DEBUG nova.scheduler.client.report [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1336.253232] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697255, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520847} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.253630] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 99cc8af3-5c18-4839-94db-996861e0c276/99cc8af3-5c18-4839-94db-996861e0c276.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1336.253836] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1336.254139] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e91b88f-9131-4cda-8f60-210a57f2e78a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.260649] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1336.260649] env[63297]: value = "task-1697256" [ 1336.260649] env[63297]: _type = "Task" [ 1336.260649] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.270681] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697256, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.280930] env[63297]: DEBUG nova.compute.manager [req-54e72c1f-cfa9-44a8-bd75-b6ef81734a64 req-47cea579-012b-47ff-9a7a-1d38c32c7c3b service nova] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Received event network-vif-deleted-49d5fb68-a759-487e-b35a-545fe16b7625 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1336.281156] env[63297]: DEBUG nova.compute.manager [req-54e72c1f-cfa9-44a8-bd75-b6ef81734a64 req-47cea579-012b-47ff-9a7a-1d38c32c7c3b service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Received event network-vif-deleted-6a0b5502-f624-47b8-b693-32a590e69f57 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1336.281323] env[63297]: INFO nova.compute.manager [req-54e72c1f-cfa9-44a8-bd75-b6ef81734a64 req-47cea579-012b-47ff-9a7a-1d38c32c7c3b service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Neutron deleted interface 6a0b5502-f624-47b8-b693-32a590e69f57; detaching it from the instance and deleting it from the info cache [ 1336.281580] env[63297]: DEBUG nova.network.neutron [req-54e72c1f-cfa9-44a8-bd75-b6ef81734a64 req-47cea579-012b-47ff-9a7a-1d38c32c7c3b service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Updating instance_info_cache with network_info: [{"id": "614d9b29-4dd8-4ac5-bbb4-bb43593e3386", "address": "fa:16:3e:66:ec:f1", "network": {"id": "4cc575c5-bae3-4aac-b61a-8bb5a678c03e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-117073918", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babfd205ed454924b0bceb1d03fcfdf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap614d9b29-4d", "ovs_interfaceid": "614d9b29-4dd8-4ac5-bbb4-bb43593e3386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.382856] env[63297]: DEBUG oslo_concurrency.lockutils [None req-734a945d-59a5-4b30-aa0b-b51f41abe46f tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.479s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1336.435707] env[63297]: DEBUG nova.network.neutron [-] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.453672] env[63297]: INFO nova.compute.manager [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 
tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Took 40.71 seconds to build instance. [ 1336.697601] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1336.697999] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7ea35782-3e57-4fda-ba30-75cff1e288dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.708562] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1336.708562] env[63297]: value = "task-1697257" [ 1336.708562] env[63297]: _type = "Task" [ 1336.708562] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.717764] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697257, 'name': CloneVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.748434] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.221s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1336.750871] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.997s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1336.752530] env[63297]: INFO nova.compute.claims [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1336.770854] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697256, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066419} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.770998] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1336.772041] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8e8e02-6e46-43ff-b875-7843bbc72b39 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.775547] env[63297]: INFO nova.scheduler.client.report [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Deleted allocations for instance 581f9d48-dcb8-4a34-928b-64087a9f966b [ 1336.800276] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 99cc8af3-5c18-4839-94db-996861e0c276/99cc8af3-5c18-4839-94db-996861e0c276.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1336.803452] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-757d2606-ac48-4061-92d7-85cc66339fb8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.805023] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb90cdba-36a5-451b-b05f-d1d7f3669710 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.833110] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3881111-4154-4628-8e18-e3466bcfbd1b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.846056] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1336.846056] env[63297]: value = "task-1697258" [ 1336.846056] env[63297]: _type = "Task" [ 1336.846056] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.856485] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697258, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.870315] env[63297]: DEBUG nova.compute.manager [req-54e72c1f-cfa9-44a8-bd75-b6ef81734a64 req-47cea579-012b-47ff-9a7a-1d38c32c7c3b service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Detach interface failed, port_id=6a0b5502-f624-47b8-b693-32a590e69f57, reason: Instance 2d7b237e-f86d-42b1-ab04-320f0012a2d1 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1336.870690] env[63297]: DEBUG nova.compute.manager [req-54e72c1f-cfa9-44a8-bd75-b6ef81734a64 req-47cea579-012b-47ff-9a7a-1d38c32c7c3b service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Received event network-vif-deleted-614d9b29-4dd8-4ac5-bbb4-bb43593e3386 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1336.870889] env[63297]: INFO nova.compute.manager [req-54e72c1f-cfa9-44a8-bd75-b6ef81734a64 req-47cea579-012b-47ff-9a7a-1d38c32c7c3b service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Neutron deleted interface 614d9b29-4dd8-4ac5-bbb4-bb43593e3386; detaching it from the instance and deleting it from the info cache [ 1336.871147] env[63297]: DEBUG nova.network.neutron [req-54e72c1f-cfa9-44a8-bd75-b6ef81734a64 req-47cea579-012b-47ff-9a7a-1d38c32c7c3b service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.884709] env[63297]: DEBUG nova.compute.manager [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1336.939212] env[63297]: INFO nova.compute.manager [-] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Took 1.69 seconds to deallocate network for instance. [ 1336.956107] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e641633f-02bc-41b9-bce0-48908c38dda4 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "5e158880-81a6-4d35-b1df-6fd59ba4a8ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.568s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.173107] env[63297]: DEBUG nova.compute.manager [req-ceea3165-45fa-456b-8658-1e1839e6c671 req-c4fca4d9-1fbc-440f-808e-c7cafe988ce6 service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Received event network-vif-plugged-faebe641-fe2b-4fc2-828b-4348d47ab8eb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1337.173455] env[63297]: DEBUG oslo_concurrency.lockutils [req-ceea3165-45fa-456b-8658-1e1839e6c671 req-c4fca4d9-1fbc-440f-808e-c7cafe988ce6 service nova] Acquiring lock "4e6b1296-9e19-4047-9c38-dc94c686d0cb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.175046] env[63297]: DEBUG oslo_concurrency.lockutils [req-ceea3165-45fa-456b-8658-1e1839e6c671 req-c4fca4d9-1fbc-440f-808e-c7cafe988ce6 service nova] Lock "4e6b1296-9e19-4047-9c38-dc94c686d0cb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.175046] env[63297]: DEBUG oslo_concurrency.lockutils [req-ceea3165-45fa-456b-8658-1e1839e6c671 req-c4fca4d9-1fbc-440f-808e-c7cafe988ce6 service nova] Lock "4e6b1296-9e19-4047-9c38-dc94c686d0cb-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.175046] env[63297]: DEBUG nova.compute.manager [req-ceea3165-45fa-456b-8658-1e1839e6c671 req-c4fca4d9-1fbc-440f-808e-c7cafe988ce6 service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] No waiting events found dispatching network-vif-plugged-faebe641-fe2b-4fc2-828b-4348d47ab8eb {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1337.175046] env[63297]: WARNING nova.compute.manager [req-ceea3165-45fa-456b-8658-1e1839e6c671 req-c4fca4d9-1fbc-440f-808e-c7cafe988ce6 service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Received unexpected event network-vif-plugged-faebe641-fe2b-4fc2-828b-4348d47ab8eb for instance with vm_state building and task_state block_device_mapping. [ 1337.221839] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697257, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.274301] env[63297]: DEBUG nova.network.neutron [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Successfully updated port: faebe641-fe2b-4fc2-828b-4348d47ab8eb {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1337.306815] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52f7b113-1ba3-4e27-89f8-1cc889f27d6e tempest-AttachInterfacesV270Test-460133642 tempest-AttachInterfacesV270Test-460133642-project-member] Lock "581f9d48-dcb8-4a34-928b-64087a9f966b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.316s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.360182] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697258, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.377295] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-168d7f83-bc1d-470b-b8e4-e9d6736352d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.390637] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206ca98a-1baa-417c-b43e-b0ba8e299e25 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.441103] env[63297]: DEBUG nova.compute.manager [req-54e72c1f-cfa9-44a8-bd75-b6ef81734a64 req-47cea579-012b-47ff-9a7a-1d38c32c7c3b service nova] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Detach interface failed, port_id=614d9b29-4dd8-4ac5-bbb4-bb43593e3386, reason: Instance 2d7b237e-f86d-42b1-ab04-320f0012a2d1 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1337.443403] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.447684] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.464678] env[63297]: DEBUG nova.compute.manager [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1337.721879] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697257, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.779628] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Acquiring lock "refresh_cache-4e6b1296-9e19-4047-9c38-dc94c686d0cb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.779800] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Acquired lock "refresh_cache-4e6b1296-9e19-4047-9c38-dc94c686d0cb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.779943] env[63297]: DEBUG nova.network.neutron [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1337.865360] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697258, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.917290] env[63297]: DEBUG nova.compute.manager [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1337.918295] env[63297]: DEBUG nova.virt.hardware [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1337.918295] env[63297]: DEBUG nova.virt.hardware [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1337.918295] env[63297]: DEBUG nova.virt.hardware [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1337.918438] env[63297]: DEBUG nova.virt.hardware [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1337.921020] env[63297]: DEBUG nova.virt.hardware [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1337.921020] env[63297]: DEBUG nova.virt.hardware [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1337.921020] env[63297]: DEBUG nova.virt.hardware [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1337.921020] env[63297]: DEBUG nova.virt.hardware [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1337.921020] env[63297]: DEBUG nova.virt.hardware [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 
tempest-ServersTestBootFromVolume-1738108197-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1337.921020] env[63297]: DEBUG nova.virt.hardware [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1337.921020] env[63297]: DEBUG nova.virt.hardware [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1337.921842] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534cd505-f29e-4b83-bec7-84c211e8647f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.932854] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d31da93-87f0-406f-a3ae-c124b053c882 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.958682] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "dc196e68-b725-43a1-9848-e84d1b138245" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.959464] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "dc196e68-b725-43a1-9848-e84d1b138245" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.985209] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.228206] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697257, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.248519] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "fb33135a-073d-4d80-9833-5b29afae1cc6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.248746] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "fb33135a-073d-4d80-9833-5b29afae1cc6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.315024] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b465f26-d958-4e76-b01d-1d1c2634b24b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.322613] env[63297]: DEBUG nova.network.neutron [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1338.325480] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce97ff6-7f29-44f2-aa80-2fdb7186d2cd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.365578] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eeb16d7-d756-4114-b9bd-fc304a528f13 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.373745] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697258, 'name': ReconfigVM_Task, 'duration_secs': 1.400347} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.375901] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 99cc8af3-5c18-4839-94db-996861e0c276/99cc8af3-5c18-4839-94db-996861e0c276.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1338.376582] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8a394b5-2ee8-43e3-afc1-c522945482b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.379189] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5704b15c-2bfe-4b43-8904-b376393d7043 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.394040] env[63297]: DEBUG nova.compute.provider_tree [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.398801] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1338.398801] env[63297]: value = "task-1697259" [ 1338.398801] env[63297]: _type = "Task" [ 1338.398801] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.408723] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697259, 'name': Rename_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.504358] env[63297]: DEBUG nova.network.neutron [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Updating instance_info_cache with network_info: [{"id": "faebe641-fe2b-4fc2-828b-4348d47ab8eb", "address": "fa:16:3e:a6:b8:9f", "network": {"id": "f4e36418-814d-420a-a079-4a0fd41e38a2", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1199928158-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f55b91c7fce9495aa8060f3f02ff7bda", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51bac3c3-00ab-4a07-9e28-b3c951dee565", "external-id": "nsx-vlan-transportzone-645", "segmentation_id": 645, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfaebe641-fe", "ovs_interfaceid": "faebe641-fe2b-4fc2-828b-4348d47ab8eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1338.726349] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697257, 'name': CloneVM_Task} progress is 95%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.900535] env[63297]: DEBUG nova.scheduler.client.report [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1338.915696] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697259, 'name': Rename_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.006765] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Releasing lock "refresh_cache-4e6b1296-9e19-4047-9c38-dc94c686d0cb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1339.007113] env[63297]: DEBUG nova.compute.manager [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Instance network_info: |[{"id": "faebe641-fe2b-4fc2-828b-4348d47ab8eb", "address": "fa:16:3e:a6:b8:9f", "network": {"id": "f4e36418-814d-420a-a079-4a0fd41e38a2", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1199928158-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f55b91c7fce9495aa8060f3f02ff7bda", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51bac3c3-00ab-4a07-9e28-b3c951dee565", "external-id": "nsx-vlan-transportzone-645", "segmentation_id": 645, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfaebe641-fe", "ovs_interfaceid": "faebe641-fe2b-4fc2-828b-4348d47ab8eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1339.007568] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:b8:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51bac3c3-00ab-4a07-9e28-b3c951dee565', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'faebe641-fe2b-4fc2-828b-4348d47ab8eb', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1339.015334] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Creating folder: Project (f55b91c7fce9495aa8060f3f02ff7bda). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1339.015981] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8947612c-2c08-4132-bb5b-8343a4ec3470 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.030196] env[63297]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 1339.030397] env[63297]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63297) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1339.031814] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Folder already exists: Project (f55b91c7fce9495aa8060f3f02ff7bda). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1339.031814] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Creating folder: Instances. Parent ref: group-v353791. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1339.031814] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7919c20e-d1a7-4613-a296-42aa24cbd31f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.041941] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Created folder: Instances in parent group-v353791. [ 1339.042198] env[63297]: DEBUG oslo.service.loopingcall [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1339.042437] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1339.042658] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48ac6c25-2c96-4d87-9c31-e6bef113742d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.067359] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1339.067359] env[63297]: value = "task-1697262" [ 1339.067359] env[63297]: _type = "Task" [ 1339.067359] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.075471] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697262, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.215970] env[63297]: DEBUG nova.compute.manager [req-7e5acf4d-fa7f-4ada-8506-b4915663188f req-e4ffebc9-5a40-4819-a7c8-559d4f775891 service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Received event network-changed-faebe641-fe2b-4fc2-828b-4348d47ab8eb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1339.216157] env[63297]: DEBUG nova.compute.manager [req-7e5acf4d-fa7f-4ada-8506-b4915663188f req-e4ffebc9-5a40-4819-a7c8-559d4f775891 service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Refreshing instance network info cache due to event network-changed-faebe641-fe2b-4fc2-828b-4348d47ab8eb. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1339.216371] env[63297]: DEBUG oslo_concurrency.lockutils [req-7e5acf4d-fa7f-4ada-8506-b4915663188f req-e4ffebc9-5a40-4819-a7c8-559d4f775891 service nova] Acquiring lock "refresh_cache-4e6b1296-9e19-4047-9c38-dc94c686d0cb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.216895] env[63297]: DEBUG oslo_concurrency.lockutils [req-7e5acf4d-fa7f-4ada-8506-b4915663188f req-e4ffebc9-5a40-4819-a7c8-559d4f775891 service nova] Acquired lock "refresh_cache-4e6b1296-9e19-4047-9c38-dc94c686d0cb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.216895] env[63297]: DEBUG nova.network.neutron [req-7e5acf4d-fa7f-4ada-8506-b4915663188f req-e4ffebc9-5a40-4819-a7c8-559d4f775891 service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Refreshing network info cache for port faebe641-fe2b-4fc2-828b-4348d47ab8eb {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1339.229148] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697257, 'name': CloneVM_Task} progress is 95%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.408940] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.409482] env[63297]: DEBUG nova.compute.manager [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1339.416625] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.992s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.418099] env[63297]: INFO nova.compute.claims [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1339.420747] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697259, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.579269] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697262, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.731434] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697257, 'name': CloneVM_Task, 'duration_secs': 2.920557} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.731716] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Created linked-clone VM from snapshot [ 1339.732563] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06750fdf-e4c2-4342-9a68-029db47cbc17 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.740374] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Uploading image 8a08122b-fd79-46eb-87fc-1e873beb9fe3 {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1339.754035] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1339.754517] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9c46b3f4-dad3-46eb-8aa8-48e916cc233c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.765328] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1339.765328] env[63297]: value = "task-1697263" [ 1339.765328] env[63297]: _type = "Task" [ 1339.765328] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.777635] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697263, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.914408] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697259, 'name': Rename_Task, 'duration_secs': 1.152645} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.914707] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1339.914959] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb23329d-9968-4c60-9a0a-4b4645a23843 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.922594] env[63297]: DEBUG nova.compute.utils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1339.925711] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1339.925711] env[63297]: value = "task-1697264" [ 1339.925711] env[63297]: _type = "Task" [ 1339.925711] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.926285] env[63297]: DEBUG nova.compute.manager [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1339.926456] env[63297]: DEBUG nova.network.neutron [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1339.942012] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697264, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.988027] env[63297]: DEBUG nova.policy [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a20e0fe0bec4d2d92a217ac49722793', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '731a719bb2a44a53985d10e02f9397cb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1340.079805] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697262, 'name': CreateVM_Task, 'duration_secs': 0.624157} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.079969] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1340.080760] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353799', 'volume_id': '1cf3188d-0b66-4933-9595-057e902e5d2b', 'name': 'volume-1cf3188d-0b66-4933-9595-057e902e5d2b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4e6b1296-9e19-4047-9c38-dc94c686d0cb', 'attached_at': '', 'detached_at': '', 'volume_id': '1cf3188d-0b66-4933-9595-057e902e5d2b', 'serial': '1cf3188d-0b66-4933-9595-057e902e5d2b'}, 'mount_device': '/dev/sda', 'disk_bus': None, 'attachment_id': '156259cc-adf4-4941-b08c-450fb512d4b6', 'guest_format': None, 'device_type': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=63297) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1340.081026] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Root volume attach. Driver type: vmdk {{(pid=63297) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1340.081823] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea6e0fe-324d-466c-9023-5ab1090aa46a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.089681] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2305e575-c148-417a-b13d-6d7132fe1eec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.096710] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afed45cd-8531-4a45-bc78-ec73a104b703 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.102485] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-aedacf3b-f6f8-431b-8c05-17ec8ef9d540 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.109466] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Waiting for the task: (returnval){ [ 1340.109466] env[63297]: value = "task-1697265" [ 1340.109466] env[63297]: _type = "Task" [ 1340.109466] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.117365] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697265, 'name': RelocateVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.144372] env[63297]: DEBUG nova.network.neutron [req-7e5acf4d-fa7f-4ada-8506-b4915663188f req-e4ffebc9-5a40-4819-a7c8-559d4f775891 service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Updated VIF entry in instance network info cache for port faebe641-fe2b-4fc2-828b-4348d47ab8eb. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1340.144763] env[63297]: DEBUG nova.network.neutron [req-7e5acf4d-fa7f-4ada-8506-b4915663188f req-e4ffebc9-5a40-4819-a7c8-559d4f775891 service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Updating instance_info_cache with network_info: [{"id": "faebe641-fe2b-4fc2-828b-4348d47ab8eb", "address": "fa:16:3e:a6:b8:9f", "network": {"id": "f4e36418-814d-420a-a079-4a0fd41e38a2", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1199928158-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f55b91c7fce9495aa8060f3f02ff7bda", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51bac3c3-00ab-4a07-9e28-b3c951dee565", "external-id": "nsx-vlan-transportzone-645", "segmentation_id": 645, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfaebe641-fe", "ovs_interfaceid": "faebe641-fe2b-4fc2-828b-4348d47ab8eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.276188] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697263, 'name': Destroy_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.404972] env[63297]: DEBUG nova.network.neutron [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Successfully created port: 1b123801-2747-40a9-84bc-ae5dc9595556 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1340.429855] env[63297]: DEBUG nova.compute.manager [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1340.442178] env[63297]: DEBUG oslo_vmware.api [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697264, 'name': PowerOnVM_Task, 'duration_secs': 0.485226} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.442528] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1340.442742] env[63297]: INFO nova.compute.manager [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Took 8.89 seconds to spawn the instance on the hypervisor. [ 1340.442917] env[63297]: DEBUG nova.compute.manager [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1340.443778] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1f2759-8795-4567-aa60-99f61211da10 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.623658] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697265, 'name': RelocateVM_Task, 'duration_secs': 0.374554} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.627019] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Volume attach. 
Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1340.627019] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353799', 'volume_id': '1cf3188d-0b66-4933-9595-057e902e5d2b', 'name': 'volume-1cf3188d-0b66-4933-9595-057e902e5d2b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4e6b1296-9e19-4047-9c38-dc94c686d0cb', 'attached_at': '', 'detached_at': '', 'volume_id': '1cf3188d-0b66-4933-9595-057e902e5d2b', 'serial': '1cf3188d-0b66-4933-9595-057e902e5d2b'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1340.627019] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bd66f5-cbd2-482d-9e86-33a675e87997 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.644895] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc00701-94ea-49af-b739-c1afa2bd42a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.647868] env[63297]: DEBUG oslo_concurrency.lockutils [req-7e5acf4d-fa7f-4ada-8506-b4915663188f req-e4ffebc9-5a40-4819-a7c8-559d4f775891 service nova] Releasing lock "refresh_cache-4e6b1296-9e19-4047-9c38-dc94c686d0cb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.669168] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] volume-1cf3188d-0b66-4933-9595-057e902e5d2b/volume-1cf3188d-0b66-4933-9595-057e902e5d2b.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1340.671361] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b47b7077-6e47-442c-9ea1-6178cfd4c27c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.692593] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Waiting for the task: (returnval){ [ 1340.692593] env[63297]: value = "task-1697266" [ 1340.692593] env[63297]: _type = "Task" [ 1340.692593] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.701628] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697266, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.781058] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697263, 'name': Destroy_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.954423] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e7848f-ac94-477e-b60e-0c6ff0eaa7c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.963281] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073e3051-14d5-425e-a1b9-29ab57bdcdf3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.000362] env[63297]: INFO nova.compute.manager [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Took 40.08 seconds to build instance. [ 1341.001986] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e2fcab-75c0-470c-9417-8d61fe8ffeff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.011411] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe72e40-e90b-4c4d-ab45-c7e02a212dd5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.026590] env[63297]: DEBUG nova.compute.provider_tree [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1341.202657] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697266, 'name': ReconfigVM_Task, 'duration_secs': 0.357415} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.202983] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Reconfigured VM instance instance-0000002c to attach disk [datastore1] volume-1cf3188d-0b66-4933-9595-057e902e5d2b/volume-1cf3188d-0b66-4933-9595-057e902e5d2b.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1341.207807] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-950b2ff6-206d-4340-be7b-26678a7eb056 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.223759] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Waiting for the task: (returnval){ [ 1341.223759] env[63297]: value = "task-1697267" [ 1341.223759] env[63297]: _type = "Task" [ 1341.223759] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.234791] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697267, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.276682] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697263, 'name': Destroy_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.446047] env[63297]: DEBUG nova.compute.manager [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1341.479786] env[63297]: DEBUG nova.virt.hardware [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1341.479901] env[63297]: DEBUG nova.virt.hardware [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1341.480066] env[63297]: DEBUG nova.virt.hardware [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1341.480245] env[63297]: DEBUG nova.virt.hardware [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1341.480421] env[63297]: DEBUG nova.virt.hardware [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1341.480575] env[63297]: DEBUG nova.virt.hardware [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1341.480778] env[63297]: DEBUG nova.virt.hardware [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1341.480928] env[63297]: DEBUG nova.virt.hardware [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1341.481109] env[63297]: DEBUG nova.virt.hardware [None 
req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1341.481271] env[63297]: DEBUG nova.virt.hardware [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1341.481437] env[63297]: DEBUG nova.virt.hardware [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1341.482432] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61843cb9-cc8b-426d-aadf-cecb2f8af63a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.491802] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c786a1-ee79-4282-b9eb-852c718579b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.510824] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5b88dd3-65f9-4641-9c83-c8a740ae90af tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "99cc8af3-5c18-4839-94db-996861e0c276" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.764s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.549725] env[63297]: ERROR nova.scheduler.client.report [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [req-2da62f36-5a29-4774-ab9b-d4273938204d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2da62f36-5a29-4774-ab9b-d4273938204d"}]} [ 1341.569591] env[63297]: DEBUG nova.scheduler.client.report [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1341.591517] env[63297]: DEBUG nova.scheduler.client.report [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1341.591517] env[63297]: DEBUG nova.compute.provider_tree [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1341.613305] env[63297]: DEBUG nova.scheduler.client.report [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1341.634420] env[63297]: DEBUG nova.scheduler.client.report [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1341.736306] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697267, 'name': ReconfigVM_Task, 'duration_secs': 0.169595} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.737068] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353799', 'volume_id': '1cf3188d-0b66-4933-9595-057e902e5d2b', 'name': 'volume-1cf3188d-0b66-4933-9595-057e902e5d2b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4e6b1296-9e19-4047-9c38-dc94c686d0cb', 'attached_at': '', 'detached_at': '', 'volume_id': '1cf3188d-0b66-4933-9595-057e902e5d2b', 'serial': '1cf3188d-0b66-4933-9595-057e902e5d2b'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1341.740379] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c2e6560-fe0a-4e6f-99d8-da5289312729 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.748585] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Waiting for the task: (returnval){ [ 1341.748585] env[63297]: value = "task-1697268" [ 1341.748585] env[63297]: _type = "Task" [ 1341.748585] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.760756] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697268, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.783060] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697263, 'name': Destroy_Task, 'duration_secs': 1.521058} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.784234] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Destroyed the VM [ 1341.784234] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1341.784234] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-aa6a08d5-65ca-460d-8cf7-5df6966785f5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.792912] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1341.792912] env[63297]: value = "task-1697269" [ 1341.792912] env[63297]: _type = "Task" [ 1341.792912] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.803405] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697269, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.982753] env[63297]: DEBUG nova.compute.manager [req-5e666bd0-2fa7-418e-bf12-929baa09d7a8 req-1d9d1dfd-0533-43c2-a96a-aa575a6c5247 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Received event network-vif-plugged-1b123801-2747-40a9-84bc-ae5dc9595556 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1341.982753] env[63297]: DEBUG oslo_concurrency.lockutils [req-5e666bd0-2fa7-418e-bf12-929baa09d7a8 req-1d9d1dfd-0533-43c2-a96a-aa575a6c5247 service nova] Acquiring lock "5124f7fb-1293-4964-98c4-426ecfce7d10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.982753] env[63297]: DEBUG oslo_concurrency.lockutils [req-5e666bd0-2fa7-418e-bf12-929baa09d7a8 req-1d9d1dfd-0533-43c2-a96a-aa575a6c5247 service nova] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.982753] env[63297]: DEBUG oslo_concurrency.lockutils [req-5e666bd0-2fa7-418e-bf12-929baa09d7a8 req-1d9d1dfd-0533-43c2-a96a-aa575a6c5247 service nova] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.982753] env[63297]: DEBUG nova.compute.manager [req-5e666bd0-2fa7-418e-bf12-929baa09d7a8 req-1d9d1dfd-0533-43c2-a96a-aa575a6c5247 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] No waiting events found dispatching network-vif-plugged-1b123801-2747-40a9-84bc-ae5dc9595556 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1341.982753] env[63297]: WARNING nova.compute.manager [req-5e666bd0-2fa7-418e-bf12-929baa09d7a8 req-1d9d1dfd-0533-43c2-a96a-aa575a6c5247 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Received unexpected event network-vif-plugged-1b123801-2747-40a9-84bc-ae5dc9595556 for instance with vm_state building and task_state spawning. [ 1342.016891] env[63297]: DEBUG nova.compute.manager [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1342.097480] env[63297]: DEBUG nova.network.neutron [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Successfully updated port: 1b123801-2747-40a9-84bc-ae5dc9595556 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1342.246614] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb58f74-a8b5-43fb-8643-2c8b2d31cf0f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.269207] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b5d440-c176-40c0-8c9b-438fba06f02a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.273029] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697268, 'name': Rename_Task, 'duration_secs': 0.160911} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.273335] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1342.273991] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0268cdb6-652b-4e42-8147-6f74e7ee4da3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.312588] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711da765-6a55-4ca1-ad5d-1a2336b274c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.315438] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Waiting for the task: (returnval){ [ 1342.315438] env[63297]: value = "task-1697270" [ 1342.315438] env[63297]: _type = "Task" [ 1342.315438] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.326607] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697269, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.327150] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b8c3e5-6f01-4761-80dc-523e472b0cc8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.334616] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697270, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.345929] env[63297]: DEBUG nova.compute.provider_tree [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.544959] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.601256] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.601256] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquired lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.601456] env[63297]: DEBUG nova.network.neutron [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1342.815849] env[63297]: DEBUG oslo_vmware.api [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697269, 'name': RemoveSnapshot_Task, 'duration_secs': 0.944814} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.816247] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1342.830051] env[63297]: DEBUG oslo_vmware.api [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697270, 'name': PowerOnVM_Task, 'duration_secs': 0.50296} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.830361] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1342.830909] env[63297]: INFO nova.compute.manager [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Took 4.91 seconds to spawn the instance on the hypervisor. [ 1342.830909] env[63297]: DEBUG nova.compute.manager [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1342.831594] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e651cb24-5f1d-4317-ab23-11bdeedec9d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.850918] env[63297]: DEBUG nova.scheduler.client.report [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1343.153992] env[63297]: DEBUG nova.network.neutron [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1343.328183] env[63297]: WARNING nova.compute.manager [None req-d18b5f8a-064c-4506-b969-893eabaffe83 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Image not found during snapshot: nova.exception.ImageNotFound: Image 8a08122b-fd79-46eb-87fc-1e873beb9fe3 could not be found. [ 1343.341497] env[63297]: DEBUG nova.network.neutron [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Updating instance_info_cache with network_info: [{"id": "1b123801-2747-40a9-84bc-ae5dc9595556", "address": "fa:16:3e:42:cf:1d", "network": {"id": "437d4b1d-796c-43d8-8258-df0e6b4e36d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-940883115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731a719bb2a44a53985d10e02f9397cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b123801-27", "ovs_interfaceid": "1b123801-2747-40a9-84bc-ae5dc9595556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.366204] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.949s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.366347] env[63297]: DEBUG nova.compute.manager [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1343.371366] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.717s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.372498] env[63297]: DEBUG nova.objects.instance [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lazy-loading 'resources' on Instance uuid eebcad60-4b8a-4fa0-b846-b65972c4c69c {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1343.373022] env[63297]: INFO nova.compute.manager [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Took 39.92 seconds to build instance. [ 1343.845024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Releasing lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.845371] env[63297]: DEBUG nova.compute.manager [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Instance network_info: |[{"id": "1b123801-2747-40a9-84bc-ae5dc9595556", "address": "fa:16:3e:42:cf:1d", "network": {"id": "437d4b1d-796c-43d8-8258-df0e6b4e36d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-940883115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731a719bb2a44a53985d10e02f9397cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b123801-27", "ovs_interfaceid": "1b123801-2747-40a9-84bc-ae5dc9595556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1343.845801] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:cf:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '1b123801-2747-40a9-84bc-ae5dc9595556', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1343.854962] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Creating folder: Project (731a719bb2a44a53985d10e02f9397cb). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1343.855755] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5790bc0-1708-465b-9099-b5b4104b7781 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.870020] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Created folder: Project (731a719bb2a44a53985d10e02f9397cb) in parent group-v353718. [ 1343.870020] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Creating folder: Instances. Parent ref: group-v353853. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1343.870020] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75a9bc4e-23c9-4054-bbab-239f73c989a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.878243] env[63297]: DEBUG nova.compute.utils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1343.880097] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3aa5998e-3afa-45ee-929f-ff5a7b72fbdd tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Lock "4e6b1296-9e19-4047-9c38-dc94c686d0cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.978s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.883032] env[63297]: DEBUG nova.compute.manager [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1343.883032] env[63297]: DEBUG nova.network.neutron [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1343.886230] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Created folder: Instances in parent group-v353853. 
[ 1343.886631] env[63297]: DEBUG oslo.service.loopingcall [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1343.887111] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1343.887797] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a77387a8-f8f7-43fa-a4c7-695307f78a5a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.924452] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1343.924452] env[63297]: value = "task-1697273" [ 1343.924452] env[63297]: _type = "Task" [ 1343.924452] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.932819] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697273, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.954034] env[63297]: DEBUG nova.policy [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080894c6e46d4686b5bfd67e5eddbe2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20f891cd9bb546b9bfe8095234165327', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1344.042174] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "c147f97d-7fae-4364-a9c0-04978df2450f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.042174] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "c147f97d-7fae-4364-a9c0-04978df2450f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.075037] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "c83c23d9-a8ec-4a87-8a8c-067e18d2615a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.075086] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "c83c23d9-a8ec-4a87-8a8c-067e18d2615a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.153042] env[63297]: DEBUG nova.compute.manager [req-57758914-a63c-4993-b2e7-72edbf0aee46 req-81b4fec7-67cc-4f7f-ad22-7054e3ad7429 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Received event network-changed-1b123801-2747-40a9-84bc-ae5dc9595556 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1344.153414] env[63297]: DEBUG nova.compute.manager [req-57758914-a63c-4993-b2e7-72edbf0aee46 req-81b4fec7-67cc-4f7f-ad22-7054e3ad7429 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Refreshing instance network info cache due to event network-changed-1b123801-2747-40a9-84bc-ae5dc9595556. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1344.155166] env[63297]: DEBUG oslo_concurrency.lockutils [req-57758914-a63c-4993-b2e7-72edbf0aee46 req-81b4fec7-67cc-4f7f-ad22-7054e3ad7429 service nova] Acquiring lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.155166] env[63297]: DEBUG oslo_concurrency.lockutils [req-57758914-a63c-4993-b2e7-72edbf0aee46 req-81b4fec7-67cc-4f7f-ad22-7054e3ad7429 service nova] Acquired lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.155166] env[63297]: DEBUG nova.network.neutron [req-57758914-a63c-4993-b2e7-72edbf0aee46 req-81b4fec7-67cc-4f7f-ad22-7054e3ad7429 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Refreshing network info cache for port 1b123801-2747-40a9-84bc-ae5dc9595556 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1344.270661] env[63297]: DEBUG oslo_concurrency.lockutils [None req-199d03f7-2535-4916-917e-f6ae59611b0b tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.270724] env[63297]: DEBUG oslo_concurrency.lockutils [None req-199d03f7-2535-4916-917e-f6ae59611b0b tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.270875] env[63297]: DEBUG nova.compute.manager [None req-199d03f7-2535-4916-917e-f6ae59611b0b tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Checking state {{(pid=63297) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1792}} [ 1344.271841] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5d4122-ad16-4d54-afcf-405394c6d788 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.285489] env[63297]: DEBUG nova.compute.manager [None req-199d03f7-2535-4916-917e-f6ae59611b0b tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63297) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1344.286098] env[63297]: DEBUG nova.objects.instance [None req-199d03f7-2535-4916-917e-f6ae59611b0b tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lazy-loading 'flavor' on Instance uuid f429dd9b-be6c-4e90-876b-3a3931fb1c4a {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1344.307802] env[63297]: DEBUG nova.network.neutron [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Successfully created port: 701615f1-5ab7-4150-8c46-9df42e68f228 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1344.383654] env[63297]: DEBUG nova.compute.manager [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1344.388254] env[63297]: DEBUG nova.compute.manager [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1344.436838] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697273, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.496199] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbb23f5-e1dc-4b10-8905-5be7dc5a51b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.503412] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba5921b-6b49-4573-876b-e3e0f0a952cc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.543010] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bce8925-099c-425f-86c2-f62a337e5044 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.548760] env[63297]: DEBUG nova.compute.manager [req-35246935-d68e-4a7e-8c5b-7e3686deb70d req-5a42ab0b-3484-4a2c-9b5d-ad7fe1847b2d service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Received event network-changed-faebe641-fe2b-4fc2-828b-4348d47ab8eb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1344.549176] env[63297]: DEBUG nova.compute.manager [req-35246935-d68e-4a7e-8c5b-7e3686deb70d req-5a42ab0b-3484-4a2c-9b5d-ad7fe1847b2d service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Refreshing instance network info cache due to event network-changed-faebe641-fe2b-4fc2-828b-4348d47ab8eb. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1344.549583] env[63297]: DEBUG oslo_concurrency.lockutils [req-35246935-d68e-4a7e-8c5b-7e3686deb70d req-5a42ab0b-3484-4a2c-9b5d-ad7fe1847b2d service nova] Acquiring lock "refresh_cache-4e6b1296-9e19-4047-9c38-dc94c686d0cb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.549828] env[63297]: DEBUG oslo_concurrency.lockutils [req-35246935-d68e-4a7e-8c5b-7e3686deb70d req-5a42ab0b-3484-4a2c-9b5d-ad7fe1847b2d service nova] Acquired lock "refresh_cache-4e6b1296-9e19-4047-9c38-dc94c686d0cb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.550108] env[63297]: DEBUG nova.network.neutron [req-35246935-d68e-4a7e-8c5b-7e3686deb70d req-5a42ab0b-3484-4a2c-9b5d-ad7fe1847b2d service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Refreshing network info cache for port faebe641-fe2b-4fc2-828b-4348d47ab8eb {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1344.557908] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d5fdbb-794c-4707-ab8e-1f4bb283029c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.573805] env[63297]: DEBUG nova.compute.provider_tree [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1344.737334] env[63297]: DEBUG oslo_concurrency.lockutils [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 
tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "d15a7e98-755b-4c5c-ba34-dc5fc3f8846d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.737524] env[63297]: DEBUG oslo_concurrency.lockutils [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "d15a7e98-755b-4c5c-ba34-dc5fc3f8846d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.737802] env[63297]: DEBUG oslo_concurrency.lockutils [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "d15a7e98-755b-4c5c-ba34-dc5fc3f8846d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.738272] env[63297]: DEBUG oslo_concurrency.lockutils [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "d15a7e98-755b-4c5c-ba34-dc5fc3f8846d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.738348] env[63297]: DEBUG oslo_concurrency.lockutils [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "d15a7e98-755b-4c5c-ba34-dc5fc3f8846d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.740575] env[63297]: INFO nova.compute.manager [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Terminating instance [ 1344.742609] env[63297]: DEBUG nova.compute.manager [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1344.742817] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1344.743759] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e919ac91-8ebc-47a4-9774-ee46eb9354b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.753592] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1344.753856] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-500bdaff-3dcb-46c5-a10d-176b3efb687e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.761834] env[63297]: DEBUG oslo_vmware.api [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1344.761834] env[63297]: value = "task-1697274" [ 1344.761834] env[63297]: _type = "Task" [ 1344.761834] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.770647] env[63297]: DEBUG oslo_vmware.api [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697274, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.793531] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-199d03f7-2535-4916-917e-f6ae59611b0b tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1344.793813] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-549991e3-56be-4646-a65a-365ce712b341 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.804873] env[63297]: DEBUG oslo_vmware.api [None req-199d03f7-2535-4916-917e-f6ae59611b0b tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1344.804873] env[63297]: value = "task-1697275" [ 1344.804873] env[63297]: _type = "Task" [ 1344.804873] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.819782] env[63297]: DEBUG oslo_vmware.api [None req-199d03f7-2535-4916-917e-f6ae59611b0b tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697275, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.916556] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.938864] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697273, 'name': CreateVM_Task, 'duration_secs': 0.65761} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.939108] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1344.939849] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.939932] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.940281] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1344.940557] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcd4e0a4-76dd-48a0-a087-6df2b64b87bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.945607] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1344.945607] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c3c0c0-ddb0-572c-4822-f165466a660e" [ 1344.945607] env[63297]: _type = "Task" [ 1344.945607] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.946461] env[63297]: DEBUG nova.network.neutron [req-57758914-a63c-4993-b2e7-72edbf0aee46 req-81b4fec7-67cc-4f7f-ad22-7054e3ad7429 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Updated VIF entry in instance network info cache for port 1b123801-2747-40a9-84bc-ae5dc9595556. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1344.946828] env[63297]: DEBUG nova.network.neutron [req-57758914-a63c-4993-b2e7-72edbf0aee46 req-81b4fec7-67cc-4f7f-ad22-7054e3ad7429 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Updating instance_info_cache with network_info: [{"id": "1b123801-2747-40a9-84bc-ae5dc9595556", "address": "fa:16:3e:42:cf:1d", "network": {"id": "437d4b1d-796c-43d8-8258-df0e6b4e36d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-940883115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731a719bb2a44a53985d10e02f9397cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b123801-27", "ovs_interfaceid": "1b123801-2747-40a9-84bc-ae5dc9595556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.956560] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c3c0c0-ddb0-572c-4822-f165466a660e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.077973] env[63297]: DEBUG nova.scheduler.client.report [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1345.272296] env[63297]: DEBUG oslo_vmware.api [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697274, 'name': PowerOffVM_Task, 'duration_secs': 0.376672} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.272656] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1345.272834] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1345.273097] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14c86b82-7cf0-41bd-890c-46d667ffc230 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.316033] env[63297]: DEBUG oslo_vmware.api [None req-199d03f7-2535-4916-917e-f6ae59611b0b tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697275, 'name': PowerOffVM_Task, 'duration_secs': 0.337489} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.316984] env[63297]: DEBUG nova.network.neutron [req-35246935-d68e-4a7e-8c5b-7e3686deb70d req-5a42ab0b-3484-4a2c-9b5d-ad7fe1847b2d service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Updated VIF entry in instance network info cache for port faebe641-fe2b-4fc2-828b-4348d47ab8eb. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1345.317326] env[63297]: DEBUG nova.network.neutron [req-35246935-d68e-4a7e-8c5b-7e3686deb70d req-5a42ab0b-3484-4a2c-9b5d-ad7fe1847b2d service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Updating instance_info_cache with network_info: [{"id": "faebe641-fe2b-4fc2-828b-4348d47ab8eb", "address": "fa:16:3e:a6:b8:9f", "network": {"id": "f4e36418-814d-420a-a079-4a0fd41e38a2", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1199928158-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f55b91c7fce9495aa8060f3f02ff7bda", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51bac3c3-00ab-4a07-9e28-b3c951dee565", "external-id": "nsx-vlan-transportzone-645", "segmentation_id": 645, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfaebe641-fe", "ovs_interfaceid": "faebe641-fe2b-4fc2-828b-4348d47ab8eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.318602] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-199d03f7-2535-4916-917e-f6ae59611b0b tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1345.318784] env[63297]: DEBUG nova.compute.manager [None req-199d03f7-2535-4916-917e-f6ae59611b0b tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1345.319800] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f90b0c-a390-4070-bd9b-14356c9f10ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.374522] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1345.374765] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1345.374943] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Deleting the datastore file [datastore1] d15a7e98-755b-4c5c-ba34-dc5fc3f8846d {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1345.375214] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6f39e39-c25a-45c6-a4b9-16357631fdc7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.382521] env[63297]: DEBUG oslo_vmware.api [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1345.382521] env[63297]: value = "task-1697277" [ 1345.382521] env[63297]: _type = "Task" [ 1345.382521] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.391975] env[63297]: DEBUG oslo_vmware.api [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697277, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.394210] env[63297]: DEBUG nova.compute.manager [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1345.421878] env[63297]: DEBUG nova.virt.hardware [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1345.422139] env[63297]: DEBUG nova.virt.hardware [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1345.422308] env[63297]: DEBUG nova.virt.hardware [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1345.422574] env[63297]: DEBUG nova.virt.hardware [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1345.422728] env[63297]: DEBUG nova.virt.hardware [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1345.422876] env[63297]: DEBUG nova.virt.hardware [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1345.423107] env[63297]: DEBUG nova.virt.hardware [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1345.423273] env[63297]: DEBUG nova.virt.hardware [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1345.423874] env[63297]: DEBUG nova.virt.hardware [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1345.423874] env[63297]: DEBUG nova.virt.hardware [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1345.423874] env[63297]: DEBUG nova.virt.hardware [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1345.424768] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27481e9-6f9a-4106-94f7-cc88ed78389c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.432911] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc365aa9-8d34-4270-8582-7ada92570eae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.452392] env[63297]: DEBUG oslo_concurrency.lockutils [req-57758914-a63c-4993-b2e7-72edbf0aee46 
req-81b4fec7-67cc-4f7f-ad22-7054e3ad7429 service nova] Releasing lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.462058] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c3c0c0-ddb0-572c-4822-f165466a660e, 'name': SearchDatastore_Task, 'duration_secs': 0.01011} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.462058] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.462058] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1345.462058] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.462058] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.462058] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1345.462058] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-193ae84a-75a4-41a2-ae6e-0bc5e5ef811b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.478484] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1345.478687] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1345.482023] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62a9e9ce-a705-4254-8ddd-c877acb301ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.486447] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1345.486447] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5238f02b-3ade-3793-83f2-427daa2c830c" [ 1345.486447] env[63297]: _type = "Task" [ 1345.486447] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.494373] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5238f02b-3ade-3793-83f2-427daa2c830c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.583339] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.212s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.586592] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.651s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.586592] env[63297]: DEBUG nova.objects.instance [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lazy-loading 'resources' on Instance uuid 92439795-6240-4103-940b-de6d87738570 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1345.620330] env[63297]: INFO nova.scheduler.client.report [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Deleted allocations for instance eebcad60-4b8a-4fa0-b846-b65972c4c69c [ 1345.820701] env[63297]: DEBUG oslo_concurrency.lockutils [req-35246935-d68e-4a7e-8c5b-7e3686deb70d req-5a42ab0b-3484-4a2c-9b5d-ad7fe1847b2d service nova] Releasing lock "refresh_cache-4e6b1296-9e19-4047-9c38-dc94c686d0cb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.833168] env[63297]: DEBUG oslo_concurrency.lockutils [None req-199d03f7-2535-4916-917e-f6ae59611b0b tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a" "released" by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.562s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.893569] env[63297]: DEBUG oslo_vmware.api [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697277, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241801} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.893878] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1345.894082] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1345.894260] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1345.894456] env[63297]: INFO nova.compute.manager [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1345.894673] env[63297]: DEBUG oslo.service.loopingcall [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1345.894898] env[63297]: DEBUG nova.compute.manager [-] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1345.894995] env[63297]: DEBUG nova.network.neutron [-] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1345.980586] env[63297]: DEBUG nova.network.neutron [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Successfully updated port: 701615f1-5ab7-4150-8c46-9df42e68f228 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1345.997414] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5238f02b-3ade-3793-83f2-427daa2c830c, 'name': SearchDatastore_Task, 'duration_secs': 0.017764} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.998295] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-306e7286-ba13-4845-ab23-9eb01d3371b9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.006206] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1346.006206] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52305be4-bd1e-b6fa-9069-eb9600222f77" [ 1346.006206] env[63297]: _type = "Task" [ 1346.006206] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.015850] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52305be4-bd1e-b6fa-9069-eb9600222f77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.131965] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39fc6f31-3610-44b5-9531-60364798a2c8 tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "eebcad60-4b8a-4fa0-b846-b65972c4c69c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.896s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.191825] env[63297]: DEBUG nova.compute.manager [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Received event network-vif-plugged-701615f1-5ab7-4150-8c46-9df42e68f228 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1346.191825] env[63297]: DEBUG oslo_concurrency.lockutils [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] Acquiring lock "272180b9-e79b-4714-b28b-470937509f42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.191825] env[63297]: DEBUG oslo_concurrency.lockutils [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] Lock "272180b9-e79b-4714-b28b-470937509f42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.192385] env[63297]: DEBUG oslo_concurrency.lockutils [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] Lock "272180b9-e79b-4714-b28b-470937509f42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.192385] env[63297]: DEBUG nova.compute.manager [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] No waiting events found dispatching network-vif-plugged-701615f1-5ab7-4150-8c46-9df42e68f228 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1346.192507] env[63297]: WARNING nova.compute.manager [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Received unexpected event network-vif-plugged-701615f1-5ab7-4150-8c46-9df42e68f228 for instance with vm_state building and task_state spawning. [ 1346.192715] env[63297]: DEBUG nova.compute.manager [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Received event network-changed-701615f1-5ab7-4150-8c46-9df42e68f228 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1346.192964] env[63297]: DEBUG nova.compute.manager [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Refreshing instance network info cache due to event network-changed-701615f1-5ab7-4150-8c46-9df42e68f228. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1346.193918] env[63297]: DEBUG oslo_concurrency.lockutils [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] Acquiring lock "refresh_cache-272180b9-e79b-4714-b28b-470937509f42" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.193918] env[63297]: DEBUG oslo_concurrency.lockutils [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] Acquired lock "refresh_cache-272180b9-e79b-4714-b28b-470937509f42" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.193918] env[63297]: DEBUG nova.network.neutron [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Refreshing network info cache for port 701615f1-5ab7-4150-8c46-9df42e68f228 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1346.483152] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "refresh_cache-272180b9-e79b-4714-b28b-470937509f42" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.516683] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52305be4-bd1e-b6fa-9069-eb9600222f77, 'name': SearchDatastore_Task, 'duration_secs': 0.030695} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.519051] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.519416] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5124f7fb-1293-4964-98c4-426ecfce7d10/5124f7fb-1293-4964-98c4-426ecfce7d10.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1346.519845] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98ab820f-d7de-49ee-80bb-9d0cacff49bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.526405] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1346.526405] env[63297]: value = "task-1697278" [ 1346.526405] env[63297]: _type = "Task" [ 1346.526405] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.536230] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697278, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.570830] env[63297]: DEBUG nova.compute.manager [req-a7a6c74f-86ca-4738-b4e1-59a119beef9a req-dd4bde5b-ea48-4c45-a98e-f2fcda73a995 service nova] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Received event network-vif-deleted-fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1346.570923] env[63297]: INFO nova.compute.manager [req-a7a6c74f-86ca-4738-b4e1-59a119beef9a req-dd4bde5b-ea48-4c45-a98e-f2fcda73a995 service nova] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Neutron deleted interface fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc; detaching it from the instance and deleting it from the info cache [ 1346.571166] env[63297]: DEBUG nova.network.neutron [req-a7a6c74f-86ca-4738-b4e1-59a119beef9a req-dd4bde5b-ea48-4c45-a98e-f2fcda73a995 service nova] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.574637] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b5ae32-bbda-45f7-941b-61f48991df7f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.583843] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68eb462-bafd-4769-8cac-a95253c51f46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.618387] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04df51f5-e75a-4cf7-81cd-00dbe2c61538 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.626545] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26168ce9-9fa1-43d3-9f4c-7ccff6f5ead2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.641897] env[63297]: DEBUG nova.compute.provider_tree [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.688232] env[63297]: DEBUG nova.network.neutron [-] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.735118] env[63297]: DEBUG nova.network.neutron [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1346.841684] env[63297]: DEBUG nova.network.neutron [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.966917] env[63297]: DEBUG nova.objects.instance [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lazy-loading 'flavor' on Instance uuid f429dd9b-be6c-4e90-876b-3a3931fb1c4a {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1347.038222] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697278, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.073864] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-855e337a-5a7d-43e1-912a-aa38822c7d56 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.083802] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe3ad5a-b68f-4fd6-9e3e-57e438f5658a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.119794] env[63297]: DEBUG nova.compute.manager [req-a7a6c74f-86ca-4738-b4e1-59a119beef9a req-dd4bde5b-ea48-4c45-a98e-f2fcda73a995 service nova] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Detach interface failed, port_id=fb21185a-d6d3-4d40-95d5-fcfdcba5b0bc, reason: Instance d15a7e98-755b-4c5c-ba34-dc5fc3f8846d could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1347.145559] env[63297]: DEBUG nova.scheduler.client.report [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1347.190905] env[63297]: INFO nova.compute.manager [-] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Took 1.30 seconds to deallocate network for instance. 
[ 1347.346024] env[63297]: DEBUG oslo_concurrency.lockutils [req-115369c5-3f8e-450c-b276-0dcf7903b5fe req-e268ebfa-b94c-4a74-a203-7df67786d82c service nova] Releasing lock "refresh_cache-272180b9-e79b-4714-b28b-470937509f42" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.346024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquired lock "refresh_cache-272180b9-e79b-4714-b28b-470937509f42" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.346161] env[63297]: DEBUG nova.network.neutron [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1347.472657] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "refresh_cache-f429dd9b-be6c-4e90-876b-3a3931fb1c4a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.472840] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquired lock "refresh_cache-f429dd9b-be6c-4e90-876b-3a3931fb1c4a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.473053] env[63297]: DEBUG nova.network.neutron [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1347.473237] env[63297]: DEBUG nova.objects.instance [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lazy-loading 'info_cache' on Instance uuid f429dd9b-be6c-4e90-876b-3a3931fb1c4a {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1347.537431] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697278, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593526} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.537698] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5124f7fb-1293-4964-98c4-426ecfce7d10/5124f7fb-1293-4964-98c4-426ecfce7d10.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1347.537907] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1347.538197] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eec53e5a-a04d-43a3-aa54-eb6536e5cf3a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.545247] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1347.545247] env[63297]: value = "task-1697279" [ 1347.545247] env[63297]: _type = "Task" [ 1347.545247] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.552708] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697279, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.651234] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.065s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.653591] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.124s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.656089] env[63297]: INFO nova.compute.claims [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1347.673187] env[63297]: INFO nova.scheduler.client.report [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Deleted allocations for instance 92439795-6240-4103-940b-de6d87738570 [ 1347.697957] env[63297]: DEBUG oslo_concurrency.lockutils [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.888161] env[63297]: DEBUG nova.network.neutron [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1347.978252] env[63297]: DEBUG nova.objects.base [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1348.055819] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697279, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.18899} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.056153] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1348.056908] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830d79b7-103f-47fa-9316-c7718fde1c29 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.080917] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 5124f7fb-1293-4964-98c4-426ecfce7d10/5124f7fb-1293-4964-98c4-426ecfce7d10.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1348.081377] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-206d8c9f-01a1-4d70-abaf-5c848120bea2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.101405] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1348.101405] env[63297]: value = "task-1697280" [ 1348.101405] env[63297]: _type = "Task" [ 1348.101405] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.110273] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697280, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.180997] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f80f51e-52a7-40f1-8c00-f893c98e744d tempest-ListServersNegativeTestJSON-1586772530 tempest-ListServersNegativeTestJSON-1586772530-project-member] Lock "92439795-6240-4103-940b-de6d87738570" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.953s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.325353] env[63297]: DEBUG nova.network.neutron [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Updating instance_info_cache with network_info: [{"id": "701615f1-5ab7-4150-8c46-9df42e68f228", "address": "fa:16:3e:76:43:67", "network": {"id": "02b17772-010b-4af3-8bd2-16531a9a8b54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1224838679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20f891cd9bb546b9bfe8095234165327", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap701615f1-5a", "ovs_interfaceid": "701615f1-5ab7-4150-8c46-9df42e68f228", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.611963] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697280, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.692084] env[63297]: DEBUG nova.network.neutron [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Updating instance_info_cache with network_info: [{"id": "80a2f749-1281-4f8d-853e-5d5bca529aa3", "address": "fa:16:3e:60:3e:20", "network": {"id": "a7957e8b-899a-4b5d-b0bb-59a0bd5a4ae8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1195807447-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe4661ce8d764c42a57538f780b686ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80a2f749-12", "ovs_interfaceid": "80a2f749-1281-4f8d-853e-5d5bca529aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.832222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Releasing lock "refresh_cache-272180b9-e79b-4714-b28b-470937509f42" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.832645] env[63297]: DEBUG nova.compute.manager [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Instance network_info: |[{"id": "701615f1-5ab7-4150-8c46-9df42e68f228", "address": "fa:16:3e:76:43:67", "network": {"id": "02b17772-010b-4af3-8bd2-16531a9a8b54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1224838679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20f891cd9bb546b9bfe8095234165327", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap701615f1-5a", "ovs_interfaceid": "701615f1-5ab7-4150-8c46-9df42e68f228", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1348.833073] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:43:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '701615f1-5ab7-4150-8c46-9df42e68f228', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1348.840958] env[63297]: DEBUG oslo.service.loopingcall [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1348.844179] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 272180b9-e79b-4714-b28b-470937509f42] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1348.847232] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-246c2300-5df1-47e4-b129-2a0d1e5a2e5f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.876543] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1348.876543] env[63297]: value = "task-1697281" [ 1348.876543] env[63297]: _type = "Task" [ 1348.876543] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.885050] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697281, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.114703] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697280, 'name': ReconfigVM_Task, 'duration_secs': 0.903706} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.117353] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 5124f7fb-1293-4964-98c4-426ecfce7d10/5124f7fb-1293-4964-98c4-426ecfce7d10.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1349.118171] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4b6c26a3-b6f8-406d-a00f-74b554df0d81 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.125039] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1349.125039] env[63297]: value = "task-1697282" [ 1349.125039] env[63297]: _type = "Task" [ 1349.125039] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.136767] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697282, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.192944] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feded711-8d80-4d1d-8a5f-d49e7f73d4c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.196058] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Releasing lock "refresh_cache-f429dd9b-be6c-4e90-876b-3a3931fb1c4a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.202095] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21eac074-05b1-4662-a3fd-b4fe58e2a2f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.232912] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9d300c-fe07-49bf-ba4a-a422327e0778 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.241201] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8d0bc1-bec6-4a23-8bc0-0f8d92adacf5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.256926] env[63297]: DEBUG nova.compute.provider_tree [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1349.387799] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697281, 'name': CreateVM_Task, 'duration_secs': 0.321005} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.387968] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 272180b9-e79b-4714-b28b-470937509f42] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1349.389126] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1349.389298] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.389634] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1349.389899] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07d3598f-be9b-4ff7-9c9c-803d3c0d2667 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.395172] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1349.395172] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52619667-32e3-10f1-522f-e1c42c5da62b" [ 1349.395172] env[63297]: _type = "Task" [ 1349.395172] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.404333] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52619667-32e3-10f1-522f-e1c42c5da62b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.637411] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697282, 'name': Rename_Task, 'duration_secs': 0.243369} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.637858] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1349.638217] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-def5593e-ba1f-49dc-a80a-51e70dab6025 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.644598] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1349.644598] env[63297]: value = "task-1697283" [ 1349.644598] env[63297]: _type = "Task" [ 1349.644598] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.652730] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697283, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.700062] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1349.700344] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fc2555c-fabe-4be6-9348-cedbcd1daffc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.708011] env[63297]: DEBUG oslo_vmware.api [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1349.708011] env[63297]: value = "task-1697284" [ 1349.708011] env[63297]: _type = "Task" [ 1349.708011] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.716711] env[63297]: DEBUG oslo_vmware.api [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697284, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.759902] env[63297]: DEBUG nova.scheduler.client.report [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1349.914084] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52619667-32e3-10f1-522f-e1c42c5da62b, 'name': SearchDatastore_Task, 'duration_secs': 0.011847} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.914084] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.914084] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1349.914084] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1349.914084] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.914302] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1349.914419] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-890550d5-7caa-4508-8aab-f165792ea942 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.926280] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1349.926476] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1349.927438] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8958f43d-de0d-4a19-b76e-712772582f1d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.933855] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1349.933855] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526d8f3c-0768-7123-7ddb-f0faac68bc60" [ 1349.933855] env[63297]: _type = "Task" [ 1349.933855] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.942291] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526d8f3c-0768-7123-7ddb-f0faac68bc60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.156495] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697283, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.219799] env[63297]: DEBUG oslo_vmware.api [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697284, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.267617] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.612s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.267617] env[63297]: DEBUG nova.compute.manager [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1350.269839] env[63297]: DEBUG oslo_concurrency.lockutils [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.420s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.270649] env[63297]: DEBUG nova.objects.instance [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Lazy-loading 'resources' on Instance uuid 8bc4bb67-bc00-44c6-9c83-c0a1072142e6 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1350.446156] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526d8f3c-0768-7123-7ddb-f0faac68bc60, 'name': SearchDatastore_Task, 'duration_secs': 0.010255} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.448351] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f0363b1-305f-44d6-8427-af2904a7189e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.453578] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1350.453578] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a4cf5e-5fbb-5c4f-e3dc-40fa5a098995" [ 1350.453578] env[63297]: _type = "Task" [ 1350.453578] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.462530] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a4cf5e-5fbb-5c4f-e3dc-40fa5a098995, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.656584] env[63297]: DEBUG oslo_vmware.api [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697283, 'name': PowerOnVM_Task, 'duration_secs': 0.662825} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.656894] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1350.657111] env[63297]: INFO nova.compute.manager [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Took 9.21 seconds to spawn the instance on the hypervisor. [ 1350.657346] env[63297]: DEBUG nova.compute.manager [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1350.658160] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa64d20-6d1a-4853-9d95-b5808d504859 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.720671] env[63297]: DEBUG oslo_vmware.api [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697284, 'name': PowerOnVM_Task, 'duration_secs': 0.546944} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.721812] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1350.721812] env[63297]: DEBUG nova.compute.manager [None req-b38a17c8-8d64-42f0-b069-396ba7d10289 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1350.721935] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a20659-c567-4f55-8bf4-2060b58be704 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.774700] env[63297]: DEBUG nova.compute.utils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1350.776321] env[63297]: DEBUG nova.compute.manager [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1350.776483] env[63297]: DEBUG nova.network.neutron [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1350.857605] env[63297]: DEBUG nova.policy [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '791c4d84105d4db48f37499dfedba84e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a54071d8478e49b4a519ace919960911', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1350.974144] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a4cf5e-5fbb-5c4f-e3dc-40fa5a098995, 'name': SearchDatastore_Task, 'duration_secs': 0.012148} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.978498] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.978796] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 272180b9-e79b-4714-b28b-470937509f42/272180b9-e79b-4714-b28b-470937509f42.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1350.979686] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86275ceb-e99f-44ae-b9fb-ea2897dfd58c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.987370] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1350.987370] env[63297]: value = "task-1697285" [ 1350.987370] env[63297]: _type = "Task" [ 1350.987370] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.999580] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697285, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.177110] env[63297]: INFO nova.compute.manager [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Took 32.47 seconds to build instance. [ 1351.250124] env[63297]: DEBUG nova.network.neutron [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Successfully created port: 8461adeb-9363-4fea-ba94-2dfe3cc6305c {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1351.279431] env[63297]: DEBUG nova.compute.manager [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1351.320173] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23ee275-4feb-4088-9bbe-b6513bd61f5b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.332925] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c72082-6724-4651-b3d6-581c87872a88 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.369071] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708f0c29-4c42-44f0-b9eb-1a05feaeaf3f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.379630] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e45eb68-9a83-4d7a-958e-3aadfc6bdb64 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.396779] env[63297]: DEBUG nova.compute.provider_tree [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1351.500266] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697285, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.679166] env[63297]: DEBUG oslo_concurrency.lockutils [None req-844549c8-8e90-4120-a09c-c5d1e8813038 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.556s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.902771] env[63297]: DEBUG nova.scheduler.client.report [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1351.955550] env[63297]: DEBUG nova.compute.manager [req-330c8e95-9126-48d6-97e2-a3f0a3ebbd33 req-ca20846a-6ce0-4718-9603-157fe4821b07 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Received event network-changed-1b123801-2747-40a9-84bc-ae5dc9595556 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1351.955785] env[63297]: DEBUG nova.compute.manager [req-330c8e95-9126-48d6-97e2-a3f0a3ebbd33 req-ca20846a-6ce0-4718-9603-157fe4821b07 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Refreshing instance network info cache due to event network-changed-1b123801-2747-40a9-84bc-ae5dc9595556. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1351.956218] env[63297]: DEBUG oslo_concurrency.lockutils [req-330c8e95-9126-48d6-97e2-a3f0a3ebbd33 req-ca20846a-6ce0-4718-9603-157fe4821b07 service nova] Acquiring lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.956443] env[63297]: DEBUG oslo_concurrency.lockutils [req-330c8e95-9126-48d6-97e2-a3f0a3ebbd33 req-ca20846a-6ce0-4718-9603-157fe4821b07 service nova] Acquired lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.956636] env[63297]: DEBUG nova.network.neutron [req-330c8e95-9126-48d6-97e2-a3f0a3ebbd33 req-ca20846a-6ce0-4718-9603-157fe4821b07 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Refreshing network info cache for port 1b123801-2747-40a9-84bc-ae5dc9595556 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1352.001342] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697285, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.753928} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.001787] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 272180b9-e79b-4714-b28b-470937509f42/272180b9-e79b-4714-b28b-470937509f42.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1352.002048] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1352.002324] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5dfc8673-2223-4216-85ac-1556958489a5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.011554] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1352.011554] env[63297]: value = "task-1697286" [ 1352.011554] env[63297]: _type = "Task" [ 1352.011554] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.021182] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697286, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.185274] env[63297]: DEBUG nova.compute.manager [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1352.260067] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Acquiring lock "754e64ec-b6fa-49d8-9de6-ef38918378fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.260340] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Lock "754e64ec-b6fa-49d8-9de6-ef38918378fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.260541] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Acquiring lock "754e64ec-b6fa-49d8-9de6-ef38918378fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.260719] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Lock "754e64ec-b6fa-49d8-9de6-ef38918378fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.262887] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Lock "754e64ec-b6fa-49d8-9de6-ef38918378fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.266565] env[63297]: INFO nova.compute.manager [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Terminating instance [ 1352.267919] env[63297]: DEBUG nova.compute.manager [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1352.267919] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1352.267919] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e04242-72b0-4197-b560-075bf5927925 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.275911] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1352.276167] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d13d554-3cc1-42b3-bd6c-8df5c6413fce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.282682] env[63297]: DEBUG oslo_vmware.api [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Waiting for the task: (returnval){ [ 1352.282682] env[63297]: value = "task-1697287" [ 1352.282682] env[63297]: _type = "Task" [ 1352.282682] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.293193] env[63297]: DEBUG nova.compute.manager [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1352.295661] env[63297]: DEBUG oslo_vmware.api [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1697287, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.322829] env[63297]: DEBUG nova.virt.hardware [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1352.323045] env[63297]: DEBUG nova.virt.hardware [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1352.323261] env[63297]: DEBUG nova.virt.hardware [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1352.323494] env[63297]: DEBUG nova.virt.hardware [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1352.323804] env[63297]: DEBUG nova.virt.hardware [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1352.323804] env[63297]: DEBUG nova.virt.hardware [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1352.324303] env[63297]: DEBUG nova.virt.hardware [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1352.324303] env[63297]: DEBUG nova.virt.hardware [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1352.324470] env[63297]: DEBUG 
nova.virt.hardware [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1352.324534] env[63297]: DEBUG nova.virt.hardware [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1352.324682] env[63297]: DEBUG nova.virt.hardware [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1352.326175] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e13227d-7e65-470c-9164-6071078608c5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.334305] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a26a56-8946-41b3-9c97-828a486fc296 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.412019] env[63297]: DEBUG oslo_concurrency.lockutils [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.139s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.412019] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.905s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.413592] env[63297]: INFO nova.compute.claims [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1352.448035] env[63297]: INFO nova.scheduler.client.report [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Deleted allocations for instance 8bc4bb67-bc00-44c6-9c83-c0a1072142e6 [ 1352.522836] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697286, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062728} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.523112] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1352.523921] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a746af34-e23f-4333-9b6c-0900c2798b68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.546259] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 272180b9-e79b-4714-b28b-470937509f42/272180b9-e79b-4714-b28b-470937509f42.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1352.546923] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2206f28-06da-470a-ba9f-ff1943577543 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.569400] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1352.569400] env[63297]: value = "task-1697288" [ 1352.569400] env[63297]: _type = "Task" [ 1352.569400] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.577325] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697288, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.709192] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.794997] env[63297]: DEBUG oslo_vmware.api [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1697287, 'name': PowerOffVM_Task, 'duration_secs': 0.462447} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.795299] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1352.795466] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1352.795719] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2afe514a-3a9c-475b-beaa-e966961e912e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.872427] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1352.872742] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1352.872993] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Deleting the datastore file [datastore1] 754e64ec-b6fa-49d8-9de6-ef38918378fd {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1352.873243] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38d12c61-123f-4855-81a7-42cbe7bdeba5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.879683] env[63297]: DEBUG oslo_vmware.api [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Waiting for the task: (returnval){ [ 1352.879683] env[63297]: value = "task-1697290" [ 1352.879683] env[63297]: _type = "Task" [ 1352.879683] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.893121] env[63297]: DEBUG oslo_vmware.api [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1697290, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.947672] env[63297]: DEBUG nova.network.neutron [req-330c8e95-9126-48d6-97e2-a3f0a3ebbd33 req-ca20846a-6ce0-4718-9603-157fe4821b07 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Updated VIF entry in instance network info cache for port 1b123801-2747-40a9-84bc-ae5dc9595556. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1352.948087] env[63297]: DEBUG nova.network.neutron [req-330c8e95-9126-48d6-97e2-a3f0a3ebbd33 req-ca20846a-6ce0-4718-9603-157fe4821b07 service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Updating instance_info_cache with network_info: [{"id": "1b123801-2747-40a9-84bc-ae5dc9595556", "address": "fa:16:3e:42:cf:1d", "network": {"id": "437d4b1d-796c-43d8-8258-df0e6b4e36d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-940883115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731a719bb2a44a53985d10e02f9397cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b123801-27", "ovs_interfaceid": "1b123801-2747-40a9-84bc-ae5dc9595556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.958921] env[63297]: DEBUG oslo_concurrency.lockutils [None req-14783218-2eb0-436b-89cf-58c86562e2ca tempest-InstanceActionsV221TestJSON-1816705340 tempest-InstanceActionsV221TestJSON-1816705340-project-member] Lock "8bc4bb67-bc00-44c6-9c83-c0a1072142e6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.470s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.046677] env[63297]: DEBUG nova.compute.manager [req-eafd1283-f290-4a96-b477-96d6f57e3f43 req-9ca68679-df68-463a-b199-1d05d94733c5 service nova] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Received event network-vif-plugged-8461adeb-9363-4fea-ba94-2dfe3cc6305c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1353.046677] env[63297]: DEBUG oslo_concurrency.lockutils [req-eafd1283-f290-4a96-b477-96d6f57e3f43 req-9ca68679-df68-463a-b199-1d05d94733c5 service nova] Acquiring lock "41b1ce5d-a8ac-4b93-94a3-cf26367266d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.046677] env[63297]: DEBUG oslo_concurrency.lockutils [req-eafd1283-f290-4a96-b477-96d6f57e3f43 req-9ca68679-df68-463a-b199-1d05d94733c5 service nova] Lock "41b1ce5d-a8ac-4b93-94a3-cf26367266d6-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.046677] env[63297]: DEBUG oslo_concurrency.lockutils [req-eafd1283-f290-4a96-b477-96d6f57e3f43 req-9ca68679-df68-463a-b199-1d05d94733c5 service nova] Lock "41b1ce5d-a8ac-4b93-94a3-cf26367266d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.047549] env[63297]: DEBUG nova.compute.manager [req-eafd1283-f290-4a96-b477-96d6f57e3f43 req-9ca68679-df68-463a-b199-1d05d94733c5 service nova] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] No waiting events found dispatching network-vif-plugged-8461adeb-9363-4fea-ba94-2dfe3cc6305c {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1353.047990] env[63297]: WARNING nova.compute.manager [req-eafd1283-f290-4a96-b477-96d6f57e3f43 req-9ca68679-df68-463a-b199-1d05d94733c5 service nova] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Received unexpected event network-vif-plugged-8461adeb-9363-4fea-ba94-2dfe3cc6305c for instance with vm_state building and task_state spawning. [ 1353.060106] env[63297]: DEBUG nova.network.neutron [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Successfully updated port: 8461adeb-9363-4fea-ba94-2dfe3cc6305c {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1353.086202] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697288, 'name': ReconfigVM_Task, 'duration_secs': 0.327318} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.086532] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 272180b9-e79b-4714-b28b-470937509f42/272180b9-e79b-4714-b28b-470937509f42.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1353.087219] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a67388d-cd51-4fb1-be85-ca1a61d5d71c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.093911] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1353.093911] env[63297]: value = "task-1697291" [ 1353.093911] env[63297]: _type = "Task" [ 1353.093911] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.103339] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697291, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.389695] env[63297]: DEBUG oslo_vmware.api [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Task: {'id': task-1697290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212503} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.390372] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1353.391415] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1353.391623] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1353.392552] env[63297]: INFO nova.compute.manager [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1353.392849] env[63297]: DEBUG oslo.service.loopingcall [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1353.393170] env[63297]: DEBUG nova.compute.manager [-] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1353.393170] env[63297]: DEBUG nova.network.neutron [-] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1353.452348] env[63297]: DEBUG oslo_concurrency.lockutils [req-330c8e95-9126-48d6-97e2-a3f0a3ebbd33 req-ca20846a-6ce0-4718-9603-157fe4821b07 service nova] Releasing lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.565019] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Acquiring lock "refresh_cache-41b1ce5d-a8ac-4b93-94a3-cf26367266d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.565019] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Acquired lock "refresh_cache-41b1ce5d-a8ac-4b93-94a3-cf26367266d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.565019] env[63297]: DEBUG nova.network.neutron [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1353.608807] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697291, 'name': Rename_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.987364] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c145990-575a-453c-9af0-862fd3e5802d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.994849] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94733dcb-2346-42a3-9cd1-9f2dd761a06b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.025687] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343ae531-673b-4d57-8677-4a158d1c33a9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.035664] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9016e9-abbe-4c72-b50a-fb8055550a9d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.050900] env[63297]: DEBUG nova.compute.provider_tree [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1354.109172] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697291, 'name': Rename_Task, 'duration_secs': 0.879983} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.110537] env[63297]: DEBUG nova.network.neutron [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1354.112680] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1354.113062] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef6470bd-a161-48f2-b9a2-a32176f814d4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.122049] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1354.122049] env[63297]: value = "task-1697292" [ 1354.122049] env[63297]: _type = "Task" [ 1354.122049] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.129200] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697292, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.317901] env[63297]: DEBUG nova.network.neutron [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Updating instance_info_cache with network_info: [{"id": "8461adeb-9363-4fea-ba94-2dfe3cc6305c", "address": "fa:16:3e:42:aa:7d", "network": {"id": "f71dc9ec-1bfa-4ee5-ba67-acf6156d0fa2", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-692892777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a54071d8478e49b4a519ace919960911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8461adeb-93", "ovs_interfaceid": "8461adeb-9363-4fea-ba94-2dfe3cc6305c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.441018] env[63297]: DEBUG nova.network.neutron [-] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.559128] env[63297]: DEBUG nova.scheduler.client.report [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1354.564055] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "99cc8af3-5c18-4839-94db-996861e0c276" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.564055] env[63297]: DEBUG 
oslo_concurrency.lockutils [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "99cc8af3-5c18-4839-94db-996861e0c276" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.564055] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "99cc8af3-5c18-4839-94db-996861e0c276-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.564055] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "99cc8af3-5c18-4839-94db-996861e0c276-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.564287] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "99cc8af3-5c18-4839-94db-996861e0c276-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.566132] env[63297]: INFO nova.compute.manager [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Terminating instance [ 1354.568390] env[63297]: DEBUG nova.compute.manager [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1354.568390] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1354.568671] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2844617c-d79a-4879-bf71-c78bc4cacd8c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.579814] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1354.581387] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4970b6d-561a-497c-b7f1-b218bb542051 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.596018] env[63297]: DEBUG oslo_vmware.api [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1354.596018] env[63297]: value = "task-1697293" [ 1354.596018] env[63297]: _type = "Task" [ 1354.596018] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.606152] env[63297]: DEBUG oslo_vmware.api [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697293, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.631217] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697292, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.825656] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Releasing lock "refresh_cache-41b1ce5d-a8ac-4b93-94a3-cf26367266d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.825656] env[63297]: DEBUG nova.compute.manager [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Instance network_info: |[{"id": "8461adeb-9363-4fea-ba94-2dfe3cc6305c", "address": "fa:16:3e:42:aa:7d", "network": {"id": "f71dc9ec-1bfa-4ee5-ba67-acf6156d0fa2", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-692892777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a54071d8478e49b4a519ace919960911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8461adeb-93", "ovs_interfaceid": "8461adeb-9363-4fea-ba94-2dfe3cc6305c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1354.826188] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:aa:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8461adeb-9363-4fea-ba94-2dfe3cc6305c', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1354.834969] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Creating folder: Project (a54071d8478e49b4a519ace919960911). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1354.835198] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6046b53a-4a5e-4d27-bb2a-b60be4f82173 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.847561] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Created folder: Project (a54071d8478e49b4a519ace919960911) in parent group-v353718. [ 1354.847763] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Creating folder: Instances. Parent ref: group-v353857. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1354.848022] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5373f35e-3a78-4409-bcbf-39399148357d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.861508] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Created folder: Instances in parent group-v353857. [ 1354.861865] env[63297]: DEBUG oslo.service.loopingcall [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1354.861958] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1354.862193] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0884c1d6-3acc-442d-aa0d-d80261047898 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.896375] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1354.896375] env[63297]: value = "task-1697296" [ 1354.896375] env[63297]: _type = "Task" [ 1354.896375] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.906475] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697296, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.944251] env[63297]: INFO nova.compute.manager [-] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Took 1.55 seconds to deallocate network for instance. 
[ 1355.071278] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.071278] env[63297]: DEBUG nova.compute.manager [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1355.074673] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.711s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.076711] env[63297]: INFO nova.compute.claims [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1355.085455] env[63297]: DEBUG nova.compute.manager [req-539625e2-671b-4cf2-9283-c83cc3f70f71 req-187d1be9-e363-4089-b2ed-5f6aa4396ea0 service nova] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Received event network-changed-8461adeb-9363-4fea-ba94-2dfe3cc6305c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1355.085658] env[63297]: DEBUG nova.compute.manager [req-539625e2-671b-4cf2-9283-c83cc3f70f71 req-187d1be9-e363-4089-b2ed-5f6aa4396ea0 service nova] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Refreshing instance network info cache due to event network-changed-8461adeb-9363-4fea-ba94-2dfe3cc6305c. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1355.085893] env[63297]: DEBUG oslo_concurrency.lockutils [req-539625e2-671b-4cf2-9283-c83cc3f70f71 req-187d1be9-e363-4089-b2ed-5f6aa4396ea0 service nova] Acquiring lock "refresh_cache-41b1ce5d-a8ac-4b93-94a3-cf26367266d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.086375] env[63297]: DEBUG oslo_concurrency.lockutils [req-539625e2-671b-4cf2-9283-c83cc3f70f71 req-187d1be9-e363-4089-b2ed-5f6aa4396ea0 service nova] Acquired lock "refresh_cache-41b1ce5d-a8ac-4b93-94a3-cf26367266d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.086375] env[63297]: DEBUG nova.network.neutron [req-539625e2-671b-4cf2-9283-c83cc3f70f71 req-187d1be9-e363-4089-b2ed-5f6aa4396ea0 service nova] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Refreshing network info cache for port 8461adeb-9363-4fea-ba94-2dfe3cc6305c {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1355.104798] env[63297]: DEBUG oslo_vmware.api [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697293, 'name': PowerOffVM_Task, 'duration_secs': 0.211606} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.105492] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1355.105745] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1355.106679] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d7b788e-792a-4404-bd6a-df0979cb2fe6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.131991] env[63297]: DEBUG oslo_vmware.api [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697292, 'name': PowerOnVM_Task, 'duration_secs': 0.596183} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.132293] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1355.132728] env[63297]: INFO nova.compute.manager [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Took 9.74 seconds to spawn the instance on the hypervisor. [ 1355.132728] env[63297]: DEBUG nova.compute.manager [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1355.133739] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7273fd-b3fd-43b6-845c-3746214e467d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.191988] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1355.192235] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1355.192424] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Deleting the datastore file [datastore1] 99cc8af3-5c18-4839-94db-996861e0c276 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1355.192756] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d76edb67-f602-4e7b-8e03-71a4a991a7dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.200604] env[63297]: DEBUG oslo_vmware.api [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1355.200604] env[63297]: value = "task-1697298" [ 1355.200604] env[63297]: _type = "Task" [ 1355.200604] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.215426] env[63297]: DEBUG oslo_vmware.api [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697298, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.416236] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697296, 'name': CreateVM_Task, 'duration_secs': 0.313267} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.416365] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1355.418705] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.418705] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.418705] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1355.418705] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-020d3b02-05d7-46aa-8105-c8d7a4fa2547 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.422895] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Waiting for the task: (returnval){ [ 1355.422895] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]525692ba-b78b-f245-8bf8-2179d2b30524" [ 1355.422895] env[63297]: _type = "Task" [ 1355.422895] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.431142] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525692ba-b78b-f245-8bf8-2179d2b30524, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.456876] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.585465] env[63297]: DEBUG nova.compute.utils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1355.588783] env[63297]: DEBUG nova.compute.manager [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1355.588783] env[63297]: DEBUG nova.network.neutron [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1355.664278] env[63297]: INFO nova.compute.manager [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Took 33.26 seconds to build instance. [ 1355.690303] env[63297]: DEBUG nova.policy [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc041fcfaf8543ed829cb62fffa3b883', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa1a1e0788594eb292e3fce25ed37bd2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1355.713156] env[63297]: DEBUG oslo_vmware.api [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697298, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166837} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.713218] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1355.713361] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1355.713547] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1355.713708] env[63297]: INFO nova.compute.manager [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1355.714022] env[63297]: DEBUG oslo.service.loopingcall [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1355.714156] env[63297]: DEBUG nova.compute.manager [-] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1355.714282] env[63297]: DEBUG nova.network.neutron [-] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1355.934314] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525692ba-b78b-f245-8bf8-2179d2b30524, 'name': SearchDatastore_Task, 'duration_secs': 0.010027} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.935476] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.935476] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1355.935476] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.935476] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.935476] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1355.935871] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55ab7b31-c9f7-4bc9-be1a-6dcbe73b20be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.946713] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1355.946713] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1355.947430] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec73343f-9f27-4bc4-8faf-8158589360c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.954549] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Waiting for the task: (returnval){ [ 1355.954549] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f33176-a578-dc15-1a7f-0b30522641b2" [ 1355.954549] env[63297]: _type = "Task" [ 1355.954549] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.964404] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f33176-a578-dc15-1a7f-0b30522641b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.967027] env[63297]: DEBUG nova.network.neutron [req-539625e2-671b-4cf2-9283-c83cc3f70f71 req-187d1be9-e363-4089-b2ed-5f6aa4396ea0 service nova] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Updated VIF entry in instance network info cache for port 8461adeb-9363-4fea-ba94-2dfe3cc6305c. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1355.967027] env[63297]: DEBUG nova.network.neutron [req-539625e2-671b-4cf2-9283-c83cc3f70f71 req-187d1be9-e363-4089-b2ed-5f6aa4396ea0 service nova] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Updating instance_info_cache with network_info: [{"id": "8461adeb-9363-4fea-ba94-2dfe3cc6305c", "address": "fa:16:3e:42:aa:7d", "network": {"id": "f71dc9ec-1bfa-4ee5-ba67-acf6156d0fa2", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-692892777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a54071d8478e49b4a519ace919960911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8461adeb-93", "ovs_interfaceid": "8461adeb-9363-4fea-ba94-2dfe3cc6305c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1356.092016] env[63297]: DEBUG nova.compute.manager [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1356.169326] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a3fd491d-1826-46a4-a2d1-1b7ca33c9f21 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "272180b9-e79b-4714-b28b-470937509f42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 58.353s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.466140] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f33176-a578-dc15-1a7f-0b30522641b2, 'name': SearchDatastore_Task, 'duration_secs': 0.033652} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.467338] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ecc38c6-27c9-4bdd-bbca-8dce7e3b0cc7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.473667] env[63297]: DEBUG oslo_concurrency.lockutils [req-539625e2-671b-4cf2-9283-c83cc3f70f71 req-187d1be9-e363-4089-b2ed-5f6aa4396ea0 service nova] Releasing lock "refresh_cache-41b1ce5d-a8ac-4b93-94a3-cf26367266d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1356.474100] env[63297]: DEBUG nova.compute.manager [req-539625e2-671b-4cf2-9283-c83cc3f70f71 req-187d1be9-e363-4089-b2ed-5f6aa4396ea0 service nova] [instance: 754e64ec-b6fa-49d8-9de6-ef38918378fd] Received event network-vif-deleted-ea7a3748-62ea-4bc1-b3ba-0da9fab212c3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1356.481260] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Waiting for the task: (returnval){ [ 1356.481260] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5233f681-4a29-e997-ced2-686c14960111" [ 1356.481260] env[63297]: _type = "Task" [ 1356.481260] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.490843] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5233f681-4a29-e997-ced2-686c14960111, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.675498] env[63297]: DEBUG nova.compute.manager [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1356.682943] env[63297]: DEBUG nova.network.neutron [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Successfully created port: d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1356.707175] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ced9bbb-fe54-4da4-a22b-82c208dc9c3f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.720265] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc5b26e-1ad1-40b0-972f-48060dadc67d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.750359] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad02753c-c124-4911-bb6a-a7bacf51d988 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.759562] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894ad7c2-17b2-4523-b619-7a4f644ca9a1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.774525] env[63297]: DEBUG nova.compute.provider_tree [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1356.996316] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5233f681-4a29-e997-ced2-686c14960111, 'name': SearchDatastore_Task, 'duration_secs': 0.026535} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.996316] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1356.996408] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 41b1ce5d-a8ac-4b93-94a3-cf26367266d6/41b1ce5d-a8ac-4b93-94a3-cf26367266d6.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1356.997773] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3fdf716f-8e3e-4b76-a98d-df67e50edc32 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.006016] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Waiting for the task: (returnval){ [ 1357.006016] env[63297]: value = "task-1697299" [ 1357.006016] env[63297]: _type = "Task" [ 1357.006016] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.019026] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697299, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.035183] env[63297]: DEBUG nova.network.neutron [-] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1357.106570] env[63297]: DEBUG nova.compute.manager [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1357.147058] env[63297]: DEBUG nova.virt.hardware [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:24:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='5cd2709a-9ccb-43da-a92d-61f75514f90c',id=29,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-114049318',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=<?>,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-10T17:09:25Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1357.148361] env[63297]: DEBUG nova.virt.hardware [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1357.148361] env[63297]: DEBUG nova.virt.hardware [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1357.148361] env[63297]: DEBUG nova.virt.hardware [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1357.148361] env[63297]: DEBUG nova.virt.hardware [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1357.148627] env[63297]: DEBUG nova.virt.hardware [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1357.148765] env[63297]: DEBUG nova.virt.hardware [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1357.148847] env[63297]: DEBUG nova.virt.hardware [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1357.149097] env[63297]: DEBUG 
nova.virt.hardware [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1357.149326] env[63297]: DEBUG nova.virt.hardware [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1357.149326] env[63297]: DEBUG nova.virt.hardware [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1357.151445] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7ecaa7-bc24-456d-8076-4166ff6f4df5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.162541] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68dcf40e-2d09-47a3-931d-6e650f4f09fa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.198633] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.277951] env[63297]: DEBUG nova.scheduler.client.report [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1357.535364] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697299, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.536647] env[63297]: INFO nova.compute.manager [-] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Took 1.82 seconds to deallocate network for instance. 
[ 1357.590749] env[63297]: DEBUG nova.compute.manager [req-609aff7d-e308-457a-88e4-70c14b5b08dd req-9866b94c-c4ec-407a-a53a-7dc3e4280765 service nova] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Received event network-vif-deleted-565ddc43-5913-4f71-b8a3-0186c54805d3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1357.684515] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "b1ed5d76-d358-49d3-a854-8f968bc987ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.684785] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.785033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.785033] env[63297]: DEBUG nova.compute.manager [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1357.787834] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 23.130s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1358.019113] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697299, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.674398} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.019481] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 41b1ce5d-a8ac-4b93-94a3-cf26367266d6/41b1ce5d-a8ac-4b93-94a3-cf26367266d6.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1358.019755] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1358.020038] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d153364-5d0b-432a-a45b-2086a011d591 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.029304] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Waiting for the task: (returnval){ [ 1358.029304] env[63297]: value = "task-1697300" [ 1358.029304] env[63297]: _type = "Task" [ 1358.029304] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.039187] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697300, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.046579] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.308499] env[63297]: DEBUG nova.compute.utils [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1358.310298] env[63297]: DEBUG nova.compute.manager [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1358.310667] env[63297]: DEBUG nova.network.neutron [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1358.393286] env[63297]: DEBUG nova.policy [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e1b6a582c0994c50b1bd0f622d5728c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b71913fd12874e10983e1680c3f9dcea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1358.540802] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697300, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.16216} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.541455] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1358.542382] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32bc5e5a-b45a-4f87-9a89-16cdeb7ffb05 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.567532] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 41b1ce5d-a8ac-4b93-94a3-cf26367266d6/41b1ce5d-a8ac-4b93-94a3-cf26367266d6.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1358.568096] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05953fd7-d06d-4c11-a6e3-86170d5e77bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.597164] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Waiting for the task: (returnval){ [ 1358.597164] env[63297]: value = "task-1697301" [ 1358.597164] env[63297]: _type = "Task" [ 1358.597164] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.608328] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697301, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.676235] env[63297]: DEBUG nova.network.neutron [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Successfully updated port: d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1358.816952] env[63297]: DEBUG nova.compute.manager [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1358.843042] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance ef851d71-788d-42f8-a824-5d30a89e957b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.843042] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance ef57101e-1d8a-4ad5-ad68-cad2dbea33d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.843042] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b5d34058-fa3e-4806-97e5-638bbbffaeb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.843042] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 35c68986-51b5-43ba-a076-aca3c86d68bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.843350] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 4438e230-0589-48ae-8848-d1f8414efa61 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1358.843350] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 754e64ec-b6fa-49d8-9de6-ef38918378fd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1358.843350] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b14e8466-68ab-4705-a439-6db961a149b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.843525] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 14b4518e-044a-451a-845d-fa3742e5b3e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.843525] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 9b1306f9-4b0a-4116-8e79-271478f33490 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.843629] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b65e8c04-df55-491e-861c-8aa6def8c9be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.845052] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b853b581-ea46-4455-8cdb-6ea2f31c22be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.845052] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance fba9040d-f904-44a1-8785-14d4696ea939 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.845052] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.845052] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 2d7b237e-f86d-42b1-ab04-320f0012a2d1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1358.845052] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 8fa5fef6-8768-4e24-aab3-db56a10588c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.845052] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance d15a7e98-755b-4c5c-ba34-dc5fc3f8846d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1358.845052] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance f429dd9b-be6c-4e90-876b-3a3931fb1c4a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.845052] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 5e158880-81a6-4d35-b1df-6fd59ba4a8ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.845052] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 99cc8af3-5c18-4839-94db-996861e0c276 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1358.845052] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 4e6b1296-9e19-4047-9c38-dc94c686d0cb actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.845052] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 5124f7fb-1293-4964-98c4-426ecfce7d10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.845052] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 272180b9-e79b-4714-b28b-470937509f42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.845756] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 41b1ce5d-a8ac-4b93-94a3-cf26367266d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.845756] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b95b7656-70ac-4eaf-9934-4b4c50e78035 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.845756] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance a57c0638-e14b-4474-a6b4-7184d7e2a0fe actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1358.891464] env[63297]: DEBUG nova.network.neutron [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Successfully created port: 1354943e-9dda-4b43-9508-a9535e5627e5 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1359.108296] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697301, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.179199] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.179407] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.179649] env[63297]: DEBUG nova.network.neutron [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1359.262035] env[63297]: DEBUG nova.compute.manager [req-ff7c6a27-119d-4868-8b41-903daefc2cc6 req-bb6db812-6c36-4681-9d0f-4ff8c2424ce7 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Received event network-changed-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1359.262140] env[63297]: DEBUG nova.compute.manager [req-ff7c6a27-119d-4868-8b41-903daefc2cc6 req-bb6db812-6c36-4681-9d0f-4ff8c2424ce7 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Refreshing instance network info cache due to event network-changed-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1359.262276] env[63297]: DEBUG oslo_concurrency.lockutils [req-ff7c6a27-119d-4868-8b41-903daefc2cc6 req-bb6db812-6c36-4681-9d0f-4ff8c2424ce7 service nova] Acquiring lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.262419] env[63297]: DEBUG oslo_concurrency.lockutils [req-ff7c6a27-119d-4868-8b41-903daefc2cc6 req-bb6db812-6c36-4681-9d0f-4ff8c2424ce7 service nova] Acquired lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.262578] env[63297]: DEBUG nova.network.neutron [req-ff7c6a27-119d-4868-8b41-903daefc2cc6 req-bb6db812-6c36-4681-9d0f-4ff8c2424ce7 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Refreshing network info cache for port e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1359.322257] env[63297]: INFO nova.virt.block_device [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Booting with volume 950dc879-aa21-4a03-88c5-e4e67d4e27c0 at /dev/sda [ 1359.350394] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance c4e96403-895c-479d-bfb2-274a87446bf9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1359.361801] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8451e65b-ae61-49a6-944e-749470c17884 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.374470] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd78a0af-7c53-459c-937a-3c01d5cb8fb6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.414772] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a3a7e7e-4ce2-40f4-8d11-15f9fbb6028d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.424472] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988ac00c-4b75-4cb8-99cb-a36d3cde1738 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.464627] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9632dd41-c836-45af-a76a-fa7be1446398 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.470443] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8bdffff-7ee3-40b4-8960-9d4f4d50557e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.487693] env[63297]: DEBUG nova.virt.block_device [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Updating existing volume attachment record: 6bd850c0-799f-4598-896c-b90da7bc851e {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1359.609019] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697301, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.663932] env[63297]: DEBUG nova.compute.manager [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Received event network-changed-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1359.664150] env[63297]: DEBUG nova.compute.manager [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Refreshing instance network info cache due to event network-changed-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1359.664347] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] Acquiring lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.729016] env[63297]: DEBUG nova.network.neutron [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1359.852742] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 192c3a5d-3a23-4f78-8dc7-a256b6d9381d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1359.948266] env[63297]: DEBUG nova.network.neutron [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance_info_cache with network_info: [{"id": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "address": "fa:16:3e:21:7c:1d", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9a7e1f4-1a", "ovs_interfaceid": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.087263] env[63297]: DEBUG nova.network.neutron [req-ff7c6a27-119d-4868-8b41-903daefc2cc6 req-bb6db812-6c36-4681-9d0f-4ff8c2424ce7 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Updated VIF entry in instance network info cache for port e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1360.087620] env[63297]: DEBUG nova.network.neutron [req-ff7c6a27-119d-4868-8b41-903daefc2cc6 req-bb6db812-6c36-4681-9d0f-4ff8c2424ce7 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Updating instance_info_cache with network_info: [{"id": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "address": "fa:16:3e:ed:e8:35", "network": {"id": "02b17772-010b-4af3-8bd2-16531a9a8b54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1224838679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20f891cd9bb546b9bfe8095234165327", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73c7c5d-39", "ovs_interfaceid": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.110635] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697301, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.356250] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 6ce88b93-aa42-4f34-81fa-6c09c23ace81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.453493] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.453960] env[63297]: DEBUG nova.compute.manager [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Instance network_info: |[{"id": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "address": "fa:16:3e:21:7c:1d", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9a7e1f4-1a", "ovs_interfaceid": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1360.454400] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:7c:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1360.462453] env[63297]: DEBUG oslo.service.loopingcall [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1360.462798] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1360.463219] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b4ee21e-e370-4f3a-8102-589338475e4e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.487125] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1360.487125] env[63297]: value = "task-1697302" [ 1360.487125] env[63297]: _type = "Task" [ 1360.487125] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.508416] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697302, 'name': CreateVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.591986] env[63297]: DEBUG oslo_concurrency.lockutils [req-ff7c6a27-119d-4868-8b41-903daefc2cc6 req-bb6db812-6c36-4681-9d0f-4ff8c2424ce7 service nova] Releasing lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.591986] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] Acquired lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.591986] env[63297]: DEBUG nova.network.neutron [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Refreshing network info cache for port e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1360.610514] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697301, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.863311] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 765f3232-f3f9-4d9b-92f2-fb6603f2a90a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.915321] env[63297]: DEBUG nova.network.neutron [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Successfully updated port: 1354943e-9dda-4b43-9508-a9535e5627e5 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1360.999793] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697302, 'name': CreateVM_Task, 'duration_secs': 0.35668} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.999950] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1361.000585] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.000765] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.001109] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1361.001896] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4799c08-db5d-45cc-88b5-c92f0efcf1fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.007482] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1361.007482] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522ec74d-d438-c752-9805-2bae91b18561" [ 1361.007482] env[63297]: _type = "Task" [ 1361.007482] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.015948] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522ec74d-d438-c752-9805-2bae91b18561, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.110072] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697301, 'name': ReconfigVM_Task, 'duration_secs': 2.032107} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.110513] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 41b1ce5d-a8ac-4b93-94a3-cf26367266d6/41b1ce5d-a8ac-4b93-94a3-cf26367266d6.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1361.111275] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f28450a-8e0f-4d4b-8eb9-2a3797f89f06 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.120275] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Waiting for the task: (returnval){ [ 1361.120275] env[63297]: value = "task-1697303" [ 1361.120275] env[63297]: _type = "Task" [ 1361.120275] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.128941] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697303, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.365817] env[63297]: DEBUG nova.compute.manager [req-12ae04c9-4d57-4bf7-8c86-222229532bab req-4ea6b9fb-7815-4422-852a-fab1121d4f8e service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Received event network-changed-701615f1-5ab7-4150-8c46-9df42e68f228 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1361.365945] env[63297]: DEBUG nova.compute.manager [req-12ae04c9-4d57-4bf7-8c86-222229532bab req-4ea6b9fb-7815-4422-852a-fab1121d4f8e service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Refreshing instance network info cache due to event network-changed-701615f1-5ab7-4150-8c46-9df42e68f228. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1361.366184] env[63297]: DEBUG oslo_concurrency.lockutils [req-12ae04c9-4d57-4bf7-8c86-222229532bab req-4ea6b9fb-7815-4422-852a-fab1121d4f8e service nova] Acquiring lock "refresh_cache-272180b9-e79b-4714-b28b-470937509f42" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.366328] env[63297]: DEBUG oslo_concurrency.lockutils [req-12ae04c9-4d57-4bf7-8c86-222229532bab req-4ea6b9fb-7815-4422-852a-fab1121d4f8e service nova] Acquired lock "refresh_cache-272180b9-e79b-4714-b28b-470937509f42" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.366487] env[63297]: DEBUG nova.network.neutron [req-12ae04c9-4d57-4bf7-8c86-222229532bab req-4ea6b9fb-7815-4422-852a-fab1121d4f8e service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Refreshing network info cache for port 701615f1-5ab7-4150-8c46-9df42e68f228 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1361.367897] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 63785911-ea55-4aeb-9ba2-6cea5ddd9cae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1361.380503] env[63297]: DEBUG nova.network.neutron [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Updated VIF entry in instance network info cache for port e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1361.380848] env[63297]: DEBUG nova.network.neutron [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Updating instance_info_cache with network_info: [{"id": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "address": "fa:16:3e:ed:e8:35", "network": {"id": "02b17772-010b-4af3-8bd2-16531a9a8b54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1224838679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20f891cd9bb546b9bfe8095234165327", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73c7c5d-39", "ovs_interfaceid": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.418063] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Acquiring lock "refresh_cache-a57c0638-e14b-4474-a6b4-7184d7e2a0fe" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.418335] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Acquired lock "refresh_cache-a57c0638-e14b-4474-a6b4-7184d7e2a0fe" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.418558] env[63297]: DEBUG nova.network.neutron [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1361.437400] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "272180b9-e79b-4714-b28b-470937509f42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.437572] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "272180b9-e79b-4714-b28b-470937509f42" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.437793] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "272180b9-e79b-4714-b28b-470937509f42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.437981] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "272180b9-e79b-4714-b28b-470937509f42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.438171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "272180b9-e79b-4714-b28b-470937509f42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.440634] env[63297]: INFO nova.compute.manager [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Terminating instance [ 1361.442326] env[63297]: DEBUG nova.compute.manager [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1361.442514] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1361.443410] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5291085-c272-4054-81f8-668ff5234c84 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.451854] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1361.452116] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fbba193-f8c6-4fca-a7cd-d6837da54941 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.459620] env[63297]: DEBUG oslo_vmware.api [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1361.459620] env[63297]: value = "task-1697304" [ 1361.459620] env[63297]: _type = "Task" [ 1361.459620] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.469314] env[63297]: DEBUG oslo_vmware.api [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697304, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.518446] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522ec74d-d438-c752-9805-2bae91b18561, 'name': SearchDatastore_Task, 'duration_secs': 0.020217} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.518757] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1361.519052] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1361.519222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.520659] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.520659] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1361.520659] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c1e3ddd-4b0b-47a1-ba50-1ef8cd25fdcc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.530484] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1361.530666] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1361.531487] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15dc734e-3abe-4699-9cc6-e6c98dc83644 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.537402] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1361.537402] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5251996c-9d37-e803-6bd9-aa6d2cacc94c" [ 1361.537402] env[63297]: _type = "Task" [ 1361.537402] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.547030] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5251996c-9d37-e803-6bd9-aa6d2cacc94c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.590581] env[63297]: DEBUG nova.compute.manager [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1361.590650] env[63297]: DEBUG nova.virt.hardware [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1361.591754] env[63297]: DEBUG nova.virt.hardware [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1361.591754] env[63297]: DEBUG nova.virt.hardware [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1361.591754] env[63297]: DEBUG nova.virt.hardware [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1361.591754] env[63297]: DEBUG nova.virt.hardware [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1361.591754] env[63297]: DEBUG nova.virt.hardware [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1361.591998] env[63297]: DEBUG nova.virt.hardware [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1361.592199] env[63297]: DEBUG nova.virt.hardware [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1361.592428] env[63297]: DEBUG nova.virt.hardware [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1361.592646] env[63297]: DEBUG nova.virt.hardware [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1361.592932] env[63297]: DEBUG nova.virt.hardware [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1361.594052] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c6cc75-b06b-4c0d-aca3-c94a0246a34f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.604414] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed379b6-4d28-47cb-a86a-43c4e033eedb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.634411] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697303, 'name': Rename_Task, 'duration_secs': 0.317684} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.634906] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1361.635167] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5279a102-6162-4839-bde9-e085b13682c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.646572] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Waiting for the task: (returnval){ [ 1361.646572] env[63297]: value = "task-1697305" [ 1361.646572] env[63297]: _type = "Task" [ 1361.646572] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.656473] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697305, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.872586] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance dc196e68-b725-43a1-9848-e84d1b138245 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1361.883057] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] Releasing lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1361.883279] env[63297]: DEBUG nova.compute.manager [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Received event network-vif-plugged-d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1361.883508] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] Acquiring lock "b95b7656-70ac-4eaf-9934-4b4c50e78035-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.883768] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] Lock "b95b7656-70ac-4eaf-9934-4b4c50e78035-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.883985] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] Lock "b95b7656-70ac-4eaf-9934-4b4c50e78035-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.884184] env[63297]: DEBUG nova.compute.manager [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] No waiting events found dispatching network-vif-plugged-d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1361.884392] env[63297]: WARNING nova.compute.manager [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Received unexpected event network-vif-plugged-d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb for instance with vm_state building and task_state spawning. [ 1361.884597] env[63297]: DEBUG nova.compute.manager [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Received event network-changed-d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1361.884805] env[63297]: DEBUG nova.compute.manager [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Refreshing instance network info cache due to event network-changed-d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1361.885033] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] Acquiring lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.885216] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] Acquired lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.885409] env[63297]: DEBUG nova.network.neutron [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Refreshing network info cache for port d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1361.926549] env[63297]: DEBUG nova.compute.manager [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Received event network-changed-701615f1-5ab7-4150-8c46-9df42e68f228 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1361.926810] env[63297]: DEBUG nova.compute.manager [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Refreshing instance network info cache due to event network-changed-701615f1-5ab7-4150-8c46-9df42e68f228. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1361.927046] env[63297]: DEBUG oslo_concurrency.lockutils [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] Acquiring lock "refresh_cache-272180b9-e79b-4714-b28b-470937509f42" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.958979] env[63297]: DEBUG nova.network.neutron [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1361.972677] env[63297]: DEBUG oslo_vmware.api [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697304, 'name': PowerOffVM_Task, 'duration_secs': 0.221954} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.972821] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1361.972982] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1361.973272] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f3cba2e-f802-4582-a074-b372fbdedf37 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.052609] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5251996c-9d37-e803-6bd9-aa6d2cacc94c, 'name': SearchDatastore_Task, 'duration_secs': 0.009439} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.053495] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c1c31a3-ce9d-4791-bae2-28ba70ae8aa0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.065307] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1362.065307] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5230a87c-2622-10d6-15f4-695a606d8a07" [ 1362.065307] env[63297]: _type = "Task" [ 1362.065307] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.081025] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5230a87c-2622-10d6-15f4-695a606d8a07, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.083267] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1362.083477] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1362.083648] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Deleting the datastore file [datastore1] 272180b9-e79b-4714-b28b-470937509f42 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1362.083996] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07c2e5c2-f714-49c3-9bf8-00b0644bd154 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.093462] env[63297]: DEBUG oslo_vmware.api [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1362.093462] env[63297]: value = "task-1697307" [ 1362.093462] env[63297]: _type = "Task" [ 1362.093462] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.102510] env[63297]: DEBUG oslo_vmware.api [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697307, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.147650] env[63297]: DEBUG nova.network.neutron [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Updating instance_info_cache with network_info: [{"id": "1354943e-9dda-4b43-9508-a9535e5627e5", "address": "fa:16:3e:1e:6a:b2", "network": {"id": "924d0fb5-2ab1-41f7-8b87-7613598cc00d", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-719788927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b71913fd12874e10983e1680c3f9dcea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1354943e-9d", "ovs_interfaceid": "1354943e-9dda-4b43-9508-a9535e5627e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.159892] env[63297]: DEBUG oslo_vmware.api [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697305, 'name': PowerOnVM_Task, 'duration_secs': 0.470646} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.160729] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1362.160949] env[63297]: INFO nova.compute.manager [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Took 9.87 seconds to spawn the instance on the hypervisor. 
[ 1362.161208] env[63297]: DEBUG nova.compute.manager [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1362.162151] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e7a76e-62bf-4fa3-9c78-3c98154008ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.252406] env[63297]: DEBUG nova.network.neutron [req-12ae04c9-4d57-4bf7-8c86-222229532bab req-4ea6b9fb-7815-4422-852a-fab1121d4f8e service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Updated VIF entry in instance network info cache for port 701615f1-5ab7-4150-8c46-9df42e68f228. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1362.253262] env[63297]: DEBUG nova.network.neutron [req-12ae04c9-4d57-4bf7-8c86-222229532bab req-4ea6b9fb-7815-4422-852a-fab1121d4f8e service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Updating instance_info_cache with network_info: [{"id": "701615f1-5ab7-4150-8c46-9df42e68f228", "address": "fa:16:3e:76:43:67", "network": {"id": "02b17772-010b-4af3-8bd2-16531a9a8b54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1224838679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20f891cd9bb546b9bfe8095234165327", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap701615f1-5a", "ovs_interfaceid": "701615f1-5ab7-4150-8c46-9df42e68f228", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.377968] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance fb33135a-073d-4d80-9833-5b29afae1cc6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1362.581846] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5230a87c-2622-10d6-15f4-695a606d8a07, 'name': SearchDatastore_Task, 'duration_secs': 0.014233} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.581846] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1362.581846] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b95b7656-70ac-4eaf-9934-4b4c50e78035/b95b7656-70ac-4eaf-9934-4b4c50e78035.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1362.581846] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd4cdbc5-2c68-47ed-98c7-6ac4fa817ef6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.589895] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1362.589895] env[63297]: value = "task-1697308" [ 1362.589895] env[63297]: _type = "Task" [ 1362.589895] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.607258] env[63297]: DEBUG oslo_vmware.api [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697307, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155379} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.608159] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697308, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.608551] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1362.611021] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1362.611021] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1362.611021] env[63297]: INFO nova.compute.manager [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: 272180b9-e79b-4714-b28b-470937509f42] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1362.611021] env[63297]: DEBUG oslo.service.loopingcall [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1362.611021] env[63297]: DEBUG nova.compute.manager [-] [instance: 272180b9-e79b-4714-b28b-470937509f42] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1362.611021] env[63297]: DEBUG nova.network.neutron [-] [instance: 272180b9-e79b-4714-b28b-470937509f42] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1362.663062] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Releasing lock "refresh_cache-a57c0638-e14b-4474-a6b4-7184d7e2a0fe" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1362.663062] env[63297]: DEBUG nova.compute.manager [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Instance network_info: |[{"id": "1354943e-9dda-4b43-9508-a9535e5627e5", "address": "fa:16:3e:1e:6a:b2", "network": {"id": "924d0fb5-2ab1-41f7-8b87-7613598cc00d", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-719788927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b71913fd12874e10983e1680c3f9dcea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1354943e-9d", "ovs_interfaceid": "1354943e-9dda-4b43-9508-a9535e5627e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1362.663062] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:6a:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d054505-89d3-49c5-8b38-5da917a42c49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1354943e-9dda-4b43-9508-a9535e5627e5', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1362.673908] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Creating folder: Project (b71913fd12874e10983e1680c3f9dcea). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1362.675630] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a868bd4e-26c8-44c8-93f7-6bc1654d41e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.687628] env[63297]: INFO nova.compute.manager [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Took 38.19 seconds to build instance. [ 1362.696739] env[63297]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1362.696941] env[63297]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63297) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1362.697403] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Folder already exists: Project (b71913fd12874e10983e1680c3f9dcea). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1362.697660] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Creating folder: Instances. Parent ref: group-v353819. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1362.697943] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9504d34-a1b3-4f07-a7e0-7ca23b7ded28 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.709150] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Created folder: Instances in parent group-v353819. [ 1362.709400] env[63297]: DEBUG oslo.service.loopingcall [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1362.712311] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1362.712752] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c44b95c7-6dbe-43a3-9e07-d067579e4ce0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.738321] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1362.738321] env[63297]: value = "task-1697311" [ 1362.738321] env[63297]: _type = "Task" [ 1362.738321] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.747877] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697311, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.755709] env[63297]: DEBUG oslo_concurrency.lockutils [req-12ae04c9-4d57-4bf7-8c86-222229532bab req-4ea6b9fb-7815-4422-852a-fab1121d4f8e service nova] Releasing lock "refresh_cache-272180b9-e79b-4714-b28b-470937509f42" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1362.756183] env[63297]: DEBUG oslo_concurrency.lockutils [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] Acquired lock "refresh_cache-272180b9-e79b-4714-b28b-470937509f42" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.756375] env[63297]: DEBUG nova.network.neutron [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Refreshing network info cache for port 701615f1-5ab7-4150-8c46-9df42e68f228 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1362.786181] env[63297]: DEBUG nova.network.neutron [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updated VIF entry in instance network info cache for port d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1362.786786] env[63297]: DEBUG nova.network.neutron [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance_info_cache with network_info: [{"id": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "address": "fa:16:3e:21:7c:1d", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9a7e1f4-1a", "ovs_interfaceid": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.882742] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance c147f97d-7fae-4364-a9c0-04978df2450f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1363.102853] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697308, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.191138] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e625f0d7-5e82-4d2d-9202-f7a2760fc08b tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Lock "41b1ce5d-a8ac-4b93-94a3-cf26367266d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.032s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.250326] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697311, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.279349] env[63297]: INFO nova.network.neutron [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Port 701615f1-5ab7-4150-8c46-9df42e68f228 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1363.279349] env[63297]: DEBUG nova.network.neutron [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.289607] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6c1e760-d69f-450a-ab48-c4a7bcbc7885 req-c28ddaf5-7708-4884-a346-115fddf125a6 service nova] Releasing lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1363.385720] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance c83c23d9-a8ec-4a87-8a8c-067e18d2615a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1363.465523] env[63297]: DEBUG nova.network.neutron [-] [instance: 272180b9-e79b-4714-b28b-470937509f42] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.566144] env[63297]: DEBUG nova.compute.manager [req-1d3d1773-644a-49db-869a-5541ec32761f req-99527418-7604-4667-8338-d67f3a9fad05 service nova] [instance: 272180b9-e79b-4714-b28b-470937509f42] Received event network-vif-deleted-701615f1-5ab7-4150-8c46-9df42e68f228 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1363.605014] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697308, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524928} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.605456] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b95b7656-70ac-4eaf-9934-4b4c50e78035/b95b7656-70ac-4eaf-9934-4b4c50e78035.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1363.605659] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1363.605791] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd9c9a81-33e2-4f15-9bf6-5c0384e50340 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.618157] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1363.618157] env[63297]: value = "task-1697312" [ 1363.618157] env[63297]: _type = "Task" [ 1363.618157] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.625623] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697312, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.692935] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1363.752397] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697311, 'name': CreateVM_Task, 'duration_secs': 0.659995} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.752606] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1363.753390] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353831', 'volume_id': '950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'name': 'volume-950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a57c0638-e14b-4474-a6b4-7184d7e2a0fe', 'attached_at': '', 'detached_at': '', 'volume_id': '950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'serial': '950dc879-aa21-4a03-88c5-e4e67d4e27c0'}, 'mount_device': '/dev/sda', 'disk_bus': None, 'attachment_id': '6bd850c0-799f-4598-896c-b90da7bc851e', 'guest_format': None, 'device_type': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=63297) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1363.754415] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Root volume attach. 
Driver type: vmdk {{(pid=63297) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1363.754523] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af389574-25cf-45e1-9bfa-f7d8daf523ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.765880] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d85c920-de97-458c-987a-ca3cf3aff697 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.774021] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fad685-1ffb-41a0-a470-81f827ce0173 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.780998] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-2c6d9740-3db9-462a-b312-ed2d90e8995f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.783380] env[63297]: DEBUG oslo_concurrency.lockutils [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] Releasing lock "refresh_cache-272180b9-e79b-4714-b28b-470937509f42" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1363.783791] env[63297]: DEBUG nova.compute.manager [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Received event network-vif-plugged-1354943e-9dda-4b43-9508-a9535e5627e5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1363.784119] env[63297]: DEBUG oslo_concurrency.lockutils [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] Acquiring lock "a57c0638-e14b-4474-a6b4-7184d7e2a0fe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.784466] env[63297]: DEBUG oslo_concurrency.lockutils [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] Lock "a57c0638-e14b-4474-a6b4-7184d7e2a0fe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.784771] env[63297]: DEBUG oslo_concurrency.lockutils [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] Lock "a57c0638-e14b-4474-a6b4-7184d7e2a0fe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.785106] env[63297]: DEBUG nova.compute.manager [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] No waiting events found dispatching network-vif-plugged-1354943e-9dda-4b43-9508-a9535e5627e5 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1363.785429] env[63297]: WARNING nova.compute.manager 
[req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Received unexpected event network-vif-plugged-1354943e-9dda-4b43-9508-a9535e5627e5 for instance with vm_state building and task_state spawning. [ 1363.785744] env[63297]: DEBUG nova.compute.manager [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Received event network-changed-1354943e-9dda-4b43-9508-a9535e5627e5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1363.786076] env[63297]: DEBUG nova.compute.manager [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Refreshing instance network info cache due to event network-changed-1354943e-9dda-4b43-9508-a9535e5627e5. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1363.786411] env[63297]: DEBUG oslo_concurrency.lockutils [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] Acquiring lock "refresh_cache-a57c0638-e14b-4474-a6b4-7184d7e2a0fe" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1363.786686] env[63297]: DEBUG oslo_concurrency.lockutils [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] Acquired lock "refresh_cache-a57c0638-e14b-4474-a6b4-7184d7e2a0fe" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.786986] env[63297]: DEBUG nova.network.neutron [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Refreshing network info cache for port 1354943e-9dda-4b43-9508-a9535e5627e5 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1363.793745] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Waiting for the task: (returnval){ [ 1363.793745] env[63297]: value = "task-1697313" [ 1363.793745] env[63297]: _type = "Task" [ 1363.793745] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.804347] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697313, 'name': RelocateVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.888215] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b1ed5d76-d358-49d3-a854-8f968bc987ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1363.888543] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1363.888693] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1363.971300] env[63297]: INFO nova.compute.manager [-] [instance: 272180b9-e79b-4714-b28b-470937509f42] Took 1.36 seconds to deallocate network for instance. [ 1364.127823] env[63297]: DEBUG nova.compute.manager [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Received event network-changed-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1364.128014] env[63297]: DEBUG nova.compute.manager [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Refreshing instance network info cache due to event network-changed-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1364.128231] env[63297]: DEBUG oslo_concurrency.lockutils [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] Acquiring lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.128367] env[63297]: DEBUG oslo_concurrency.lockutils [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] Acquired lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.128525] env[63297]: DEBUG nova.network.neutron [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Refreshing network info cache for port e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1364.133120] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697312, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07006} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.133708] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1364.134462] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8de19c4-4328-418d-86f9-acbbe65b743b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.159097] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] b95b7656-70ac-4eaf-9934-4b4c50e78035/b95b7656-70ac-4eaf-9934-4b4c50e78035.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1364.162743] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ac7f7d1-d8f7-4f62-8037-9671e9bdd7e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.185600] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1364.185600] env[63297]: value = "task-1697314" [ 1364.185600] env[63297]: _type = "Task" [ 1364.185600] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.200471] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697314, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.217546] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.305023] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697313, 'name': RelocateVM_Task} progress is 19%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.469835] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6708d21a-533c-4342-aa38-150c30b9a099 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.478794] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.480010] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2724bde-77e2-4ef8-b33d-4a872e74a896 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.516740] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a686da-6085-42df-88fe-614c7a9c4328 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.533739] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bb6183-6d0f-43d3-b052-13aed30b5320 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.549349] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1364.557403] env[63297]: DEBUG nova.network.neutron [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Updated VIF entry in instance network info cache for port 1354943e-9dda-4b43-9508-a9535e5627e5. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1364.557792] env[63297]: DEBUG nova.network.neutron [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Updating instance_info_cache with network_info: [{"id": "1354943e-9dda-4b43-9508-a9535e5627e5", "address": "fa:16:3e:1e:6a:b2", "network": {"id": "924d0fb5-2ab1-41f7-8b87-7613598cc00d", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-719788927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b71913fd12874e10983e1680c3f9dcea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1354943e-9d", "ovs_interfaceid": "1354943e-9dda-4b43-9508-a9535e5627e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.699512] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697314, 'name': ReconfigVM_Task, 'duration_secs': 0.281742} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.700019] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Reconfigured VM instance instance-00000030 to attach disk [datastore1] b95b7656-70ac-4eaf-9934-4b4c50e78035/b95b7656-70ac-4eaf-9934-4b4c50e78035.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1364.701453] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bfaa6a55-d4bd-4718-b53d-cad9f3359101 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.710977] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1364.710977] env[63297]: value = "task-1697315" [ 1364.710977] env[63297]: _type = "Task" [ 1364.710977] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.723241] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697315, 'name': Rename_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.808684] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697313, 'name': RelocateVM_Task, 'duration_secs': 0.861076} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.809047] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Volume attach. Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1364.809276] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353831', 'volume_id': '950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'name': 'volume-950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a57c0638-e14b-4474-a6b4-7184d7e2a0fe', 'attached_at': '', 'detached_at': '', 'volume_id': '950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'serial': '950dc879-aa21-4a03-88c5-e4e67d4e27c0'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1364.810166] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b58da0-d335-4a49-aae1-7d69fceffdc2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.834581] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9b4844-67ac-48cb-b17c-b9ff79db73e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.860405] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] volume-950dc879-aa21-4a03-88c5-e4e67d4e27c0/volume-950dc879-aa21-4a03-88c5-e4e67d4e27c0.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1364.861380] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9414f76d-5185-4d08-ac2b-43a78c7ce2ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.886884] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Waiting for the task: (returnval){ [ 1364.886884] env[63297]: value = "task-1697316" [ 1364.886884] env[63297]: _type = "Task" [ 1364.886884] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.901038] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697316, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.915944] env[63297]: DEBUG nova.network.neutron [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Updated VIF entry in instance network info cache for port e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1364.916357] env[63297]: DEBUG nova.network.neutron [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Updating instance_info_cache with network_info: [{"id": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "address": "fa:16:3e:ed:e8:35", "network": {"id": "02b17772-010b-4af3-8bd2-16531a9a8b54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1224838679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20f891cd9bb546b9bfe8095234165327", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73c7c5d-39", "ovs_interfaceid": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.053029] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1365.060525] env[63297]: DEBUG oslo_concurrency.lockutils [req-ed25e534-e0d6-49fd-a007-98448e9e7b7a req-f41021dc-92e5-4a93-aa59-dff0de20794f service nova] Releasing lock "refresh_cache-a57c0638-e14b-4474-a6b4-7184d7e2a0fe" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.223182] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 
tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697315, 'name': Rename_Task, 'duration_secs': 0.174475} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.223603] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1365.223972] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-818de2bf-3ed0-4c31-b1da-5e43e4410013 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.232030] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1365.232030] env[63297]: value = "task-1697317" [ 1365.232030] env[63297]: _type = "Task" [ 1365.232030] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.241075] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697317, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.319035] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Acquiring lock "41b1ce5d-a8ac-4b93-94a3-cf26367266d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1365.319350] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Lock "41b1ce5d-a8ac-4b93-94a3-cf26367266d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.319579] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Acquiring lock "41b1ce5d-a8ac-4b93-94a3-cf26367266d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1365.319766] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Lock "41b1ce5d-a8ac-4b93-94a3-cf26367266d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.319974] env[63297]: DEBUG 
oslo_concurrency.lockutils [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Lock "41b1ce5d-a8ac-4b93-94a3-cf26367266d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1365.322405] env[63297]: INFO nova.compute.manager [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Terminating instance [ 1365.323909] env[63297]: DEBUG nova.compute.manager [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1365.324173] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1365.325011] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a445e5-16a1-4115-9edd-8b68af218ad6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.333849] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1365.334151] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31ab2b57-525b-480c-a473-720774b23476 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.341536] env[63297]: DEBUG oslo_vmware.api [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Waiting for the task: (returnval){ [ 1365.341536] env[63297]: value = "task-1697318" [ 1365.341536] env[63297]: _type = "Task" [ 1365.341536] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.350494] env[63297]: DEBUG oslo_vmware.api [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697318, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.397891] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697316, 'name': ReconfigVM_Task, 'duration_secs': 0.28702} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.398210] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Reconfigured VM instance instance-00000031 to attach disk [datastore1] volume-950dc879-aa21-4a03-88c5-e4e67d4e27c0/volume-950dc879-aa21-4a03-88c5-e4e67d4e27c0.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1365.403016] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34a6117a-5f41-41f2-8ecf-4f7847795a57 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.418696] env[63297]: DEBUG oslo_concurrency.lockutils [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] Releasing lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.418959] env[63297]: DEBUG nova.compute.manager [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Received event network-changed-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1365.419226] env[63297]: DEBUG nova.compute.manager [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Refreshing instance network info cache due to event network-changed-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1365.419364] env[63297]: DEBUG oslo_concurrency.lockutils [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] Acquiring lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1365.419506] env[63297]: DEBUG oslo_concurrency.lockutils [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] Acquired lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.419658] env[63297]: DEBUG nova.network.neutron [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Refreshing network info cache for port e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1365.422318] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Waiting for the task: (returnval){ [ 1365.422318] env[63297]: value = "task-1697319" [ 1365.422318] env[63297]: _type = "Task" [ 1365.422318] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.431816] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697319, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.557901] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1365.558184] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.770s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1365.558488] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.591s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.559986] env[63297]: INFO nova.compute.claims [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1365.743327] env[63297]: DEBUG oslo_vmware.api [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697317, 'name': PowerOnVM_Task, 'duration_secs': 0.494236} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.743657] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1365.743896] env[63297]: INFO nova.compute.manager [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Took 8.64 seconds to spawn the instance on the hypervisor. 
[ 1365.744153] env[63297]: DEBUG nova.compute.manager [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1365.744973] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4752418-f018-4fe9-b2d3-20e12983f58e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.853070] env[63297]: DEBUG oslo_vmware.api [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697318, 'name': PowerOffVM_Task, 'duration_secs': 0.242259} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.853463] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1365.853649] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1365.854254] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2712934a-05f7-4d40-85aa-120a470fdc94 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.935042] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697319, 'name': ReconfigVM_Task, 'duration_secs': 0.150793} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.935247] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353831', 'volume_id': '950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'name': 'volume-950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a57c0638-e14b-4474-a6b4-7184d7e2a0fe', 'attached_at': '', 'detached_at': '', 'volume_id': '950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'serial': '950dc879-aa21-4a03-88c5-e4e67d4e27c0'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1365.935772] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a44c9c3-fea3-4e34-b7fa-e5a29acb9c2c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.943019] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Waiting for the task: (returnval){ [ 1365.943019] env[63297]: value = "task-1697321" [ 1365.943019] env[63297]: _type = "Task" [ 1365.943019] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.951344] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697321, 'name': Rename_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.957574] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1365.957791] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1365.957968] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Deleting the datastore file [datastore1] 41b1ce5d-a8ac-4b93-94a3-cf26367266d6 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1365.960548] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f080fdd8-ce54-4cf2-b901-b1a83911383e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.967734] env[63297]: DEBUG oslo_vmware.api [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Waiting for the task: (returnval){ [ 1365.967734] env[63297]: value = "task-1697322" [ 1365.967734] env[63297]: _type = "Task" [ 1365.967734] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.978181] env[63297]: DEBUG oslo_vmware.api [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697322, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.199974] env[63297]: DEBUG nova.network.neutron [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Updated VIF entry in instance network info cache for port e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1366.200360] env[63297]: DEBUG nova.network.neutron [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Updating instance_info_cache with network_info: [{"id": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "address": "fa:16:3e:ed:e8:35", "network": {"id": "02b17772-010b-4af3-8bd2-16531a9a8b54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1224838679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20f891cd9bb546b9bfe8095234165327", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73c7c5d-39", "ovs_interfaceid": "e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1366.266016] env[63297]: INFO nova.compute.manager [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Took 36.79 seconds to build instance. [ 1366.454028] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697321, 'name': Rename_Task, 'duration_secs': 0.154685} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.454323] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1366.454571] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24d04d05-73df-4dc3-98f2-203aa15efed0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.462731] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Waiting for the task: (returnval){ [ 1366.462731] env[63297]: value = "task-1697323" [ 1366.462731] env[63297]: _type = "Task" [ 1366.462731] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.471369] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697323, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.479040] env[63297]: DEBUG oslo_vmware.api [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Task: {'id': task-1697322, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168316} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.479276] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1366.479454] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1366.479628] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1366.479797] env[63297]: INFO nova.compute.manager [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1366.480086] env[63297]: DEBUG oslo.service.loopingcall [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1366.480329] env[63297]: DEBUG nova.compute.manager [-] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1366.480455] env[63297]: DEBUG nova.network.neutron [-] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1366.705656] env[63297]: DEBUG oslo_concurrency.lockutils [req-4076d3ac-3bc0-4bb5-90ea-da633f0c8b51 req-9ce1c6c2-14b2-4e47-9776-eb6b6d4efcf5 service nova] Releasing lock "refresh_cache-b65e8c04-df55-491e-861c-8aa6def8c9be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1366.769580] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2878ad96-a094-4e06-a079-3bb0432f859f tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "b95b7656-70ac-4eaf-9934-4b4c50e78035" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.346s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.861833] env[63297]: DEBUG nova.compute.manager [req-d40f655e-89ca-4b53-abfe-5b150b0f6dd4 req-e2ac789c-0f1b-4c24-b538-c3dffe6889bc service nova] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Received event network-vif-deleted-8461adeb-9363-4fea-ba94-2dfe3cc6305c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1366.862312] env[63297]: INFO nova.compute.manager [req-d40f655e-89ca-4b53-abfe-5b150b0f6dd4 req-e2ac789c-0f1b-4c24-b538-c3dffe6889bc service nova] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Neutron deleted interface 8461adeb-9363-4fea-ba94-2dfe3cc6305c; detaching it from the instance and deleting it from the info cache [ 1366.862739] env[63297]: DEBUG nova.network.neutron [req-d40f655e-89ca-4b53-abfe-5b150b0f6dd4 req-e2ac789c-0f1b-4c24-b538-c3dffe6889bc service nova] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1366.974111] env[63297]: DEBUG oslo_vmware.api [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697323, 'name': PowerOnVM_Task, 'duration_secs': 0.459671} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.976483] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1366.976696] env[63297]: INFO nova.compute.manager [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Took 5.39 seconds to spawn the instance on the hypervisor. 
[ 1366.976872] env[63297]: DEBUG nova.compute.manager [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1366.977806] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3df61c-bbaa-4b37-b3a6-0e6826210a80 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.071518] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788821b4-5f2d-4775-91df-839e6c6cd2dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.079754] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc1234b-af43-48f3-90f9-4db55f367d16 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.112952] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc94b0a-fbf5-4c63-90c6-308ea2e577f6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.121139] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416e4787-b9fe-4cc1-837e-7063a7523cb7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.136052] env[63297]: DEBUG nova.compute.provider_tree [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1367.190586] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "interface-ef851d71-788d-42f8-a824-5d30a89e957b-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.190964] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-ef851d71-788d-42f8-a824-5d30a89e957b-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.191306] env[63297]: DEBUG nova.objects.instance [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'flavor' on Instance uuid ef851d71-788d-42f8-a824-5d30a89e957b {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1367.282106] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 
tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1367.291020] env[63297]: DEBUG nova.network.neutron [-] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.369447] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6d9c254-4c70-4ff6-81a7-d19d5f2c085f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.379232] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1846543e-8fc9-41bb-a230-67160332e702 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.417092] env[63297]: DEBUG nova.compute.manager [req-d40f655e-89ca-4b53-abfe-5b150b0f6dd4 req-e2ac789c-0f1b-4c24-b538-c3dffe6889bc service nova] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Detach interface failed, port_id=8461adeb-9363-4fea-ba94-2dfe3cc6305c, reason: Instance 41b1ce5d-a8ac-4b93-94a3-cf26367266d6 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1367.495725] env[63297]: INFO nova.compute.manager [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Took 34.15 seconds to build instance. [ 1367.639684] env[63297]: DEBUG nova.scheduler.client.report [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1367.781016] env[63297]: DEBUG nova.objects.instance [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'pci_requests' on Instance uuid ef851d71-788d-42f8-a824-5d30a89e957b {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1367.794044] env[63297]: INFO nova.compute.manager [-] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Took 1.31 seconds to deallocate network for instance. 
[ 1367.806108] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.997548] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d6258eaa-f9c7-402e-8185-214ecb8aaf01 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lock "a57c0638-e14b-4474-a6b4-7184d7e2a0fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.960s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.153805] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.154413] env[63297]: DEBUG nova.compute.manager [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1368.159128] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.086s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.159338] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.161272] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.718s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.162694] env[63297]: INFO nova.compute.claims [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1368.169702] env[63297]: DEBUG nova.compute.manager [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 
b95b7656-70ac-4eaf-9934-4b4c50e78035] Stashing vm_state: active {{(pid=63297) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1368.193156] env[63297]: INFO nova.scheduler.client.report [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Deleted allocations for instance 4438e230-0589-48ae-8848-d1f8414efa61 [ 1368.284869] env[63297]: DEBUG nova.objects.base [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1368.284869] env[63297]: DEBUG nova.network.neutron [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1368.303272] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.327105] env[63297]: DEBUG nova.policy [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c21dc2049dd84f87a3b7cbcd7ba0ebcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48bfb708de5c4dd287530be2f8483ca9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1368.500455] env[63297]: DEBUG nova.compute.manager [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1368.660690] env[63297]: DEBUG nova.compute.utils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1368.664243] env[63297]: DEBUG nova.network.neutron [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Successfully created port: 1c481d81-78fe-48f3-9eb8-9180cb78ecdf {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1368.666173] env[63297]: DEBUG nova.compute.manager [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1368.666348] env[63297]: DEBUG nova.network.neutron [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1368.688953] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.701931] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f7bf9b91-fdb8-4f35-ac39-cd137e5b7e2e tempest-ListImageFiltersTestJSON-640311490 tempest-ListImageFiltersTestJSON-640311490-project-member] Lock "4438e230-0589-48ae-8848-d1f8414efa61" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.076s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.790972] env[63297]: DEBUG nova.policy [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0da2fdb3c81747698f971951c5e0068b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efc8039a70b34a269d3aed1ecb558b7e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1369.028212] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1369.169297] env[63297]: DEBUG nova.compute.manager [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1369.327994] env[63297]: DEBUG nova.network.neutron [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Successfully created port: fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1369.689146] env[63297]: DEBUG nova.compute.manager [req-d326980f-c98a-451f-914e-62aee2a47890 req-649428b5-4cb5-46b9-8437-043cfe02c2c9 service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Received event network-changed-1354943e-9dda-4b43-9508-a9535e5627e5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1369.689345] env[63297]: DEBUG nova.compute.manager [req-d326980f-c98a-451f-914e-62aee2a47890 req-649428b5-4cb5-46b9-8437-043cfe02c2c9 service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Refreshing instance network info cache due to event network-changed-1354943e-9dda-4b43-9508-a9535e5627e5. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1369.689560] env[63297]: DEBUG oslo_concurrency.lockutils [req-d326980f-c98a-451f-914e-62aee2a47890 req-649428b5-4cb5-46b9-8437-043cfe02c2c9 service nova] Acquiring lock "refresh_cache-a57c0638-e14b-4474-a6b4-7184d7e2a0fe" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.689700] env[63297]: DEBUG oslo_concurrency.lockutils [req-d326980f-c98a-451f-914e-62aee2a47890 req-649428b5-4cb5-46b9-8437-043cfe02c2c9 service nova] Acquired lock "refresh_cache-a57c0638-e14b-4474-a6b4-7184d7e2a0fe" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.689861] env[63297]: DEBUG nova.network.neutron [req-d326980f-c98a-451f-914e-62aee2a47890 req-649428b5-4cb5-46b9-8437-043cfe02c2c9 service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Refreshing network info cache for port 1354943e-9dda-4b43-9508-a9535e5627e5 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1369.779089] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280d501b-2e53-4802-b8ff-ee577f9bc0fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.788328] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2114b0b5-9d1e-4fcc-8f85-d22736c27c1a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.828077] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b58003-32b4-4fc2-a6f9-6fb9135e7e35 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.836926] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4021fb13-9341-4af2-9e34-999432858bcd {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.851352] env[63297]: DEBUG nova.compute.provider_tree [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1370.184143] env[63297]: DEBUG nova.compute.manager [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1370.197111] env[63297]: DEBUG nova.network.neutron [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Successfully updated port: 1c481d81-78fe-48f3-9eb8-9180cb78ecdf {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1370.211248] env[63297]: DEBUG nova.virt.hardware [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1370.211542] env[63297]: DEBUG nova.virt.hardware [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1370.211913] env[63297]: DEBUG nova.virt.hardware [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1370.212189] env[63297]: DEBUG nova.virt.hardware [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1370.212341] env[63297]: DEBUG nova.virt.hardware [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1370.212484] env[63297]: DEBUG 
nova.virt.hardware [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1370.212690] env[63297]: DEBUG nova.virt.hardware [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1370.212887] env[63297]: DEBUG nova.virt.hardware [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1370.214055] env[63297]: DEBUG nova.virt.hardware [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1370.214055] env[63297]: DEBUG nova.virt.hardware [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1370.214055] env[63297]: DEBUG nova.virt.hardware [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1370.214750] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c016c875-cfe1-4905-8097-a873cb4af160 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.229803] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3267cb64-5eb5-4852-a163-1b171d78fc6f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.353848] env[63297]: DEBUG nova.scheduler.client.report [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1370.393900] env[63297]: DEBUG nova.compute.manager [req-8c9e67b8-b0a3-4748-8187-70c29776d898 req-8e1a0694-a0d1-4c83-bae9-84e96805fc74 service nova] [instance: 
ef851d71-788d-42f8-a824-5d30a89e957b] Received event network-vif-plugged-1c481d81-78fe-48f3-9eb8-9180cb78ecdf {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1370.397018] env[63297]: DEBUG oslo_concurrency.lockutils [req-8c9e67b8-b0a3-4748-8187-70c29776d898 req-8e1a0694-a0d1-4c83-bae9-84e96805fc74 service nova] Acquiring lock "ef851d71-788d-42f8-a824-5d30a89e957b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.397018] env[63297]: DEBUG oslo_concurrency.lockutils [req-8c9e67b8-b0a3-4748-8187-70c29776d898 req-8e1a0694-a0d1-4c83-bae9-84e96805fc74 service nova] Lock "ef851d71-788d-42f8-a824-5d30a89e957b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.397018] env[63297]: DEBUG oslo_concurrency.lockutils [req-8c9e67b8-b0a3-4748-8187-70c29776d898 req-8e1a0694-a0d1-4c83-bae9-84e96805fc74 service nova] Lock "ef851d71-788d-42f8-a824-5d30a89e957b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1370.397018] env[63297]: DEBUG nova.compute.manager [req-8c9e67b8-b0a3-4748-8187-70c29776d898 req-8e1a0694-a0d1-4c83-bae9-84e96805fc74 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] No waiting events found dispatching network-vif-plugged-1c481d81-78fe-48f3-9eb8-9180cb78ecdf {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1370.397018] env[63297]: WARNING nova.compute.manager [req-8c9e67b8-b0a3-4748-8187-70c29776d898 req-8e1a0694-a0d1-4c83-bae9-84e96805fc74 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Received unexpected event network-vif-plugged-1c481d81-78fe-48f3-9eb8-9180cb78ecdf for instance with vm_state active and task_state None. [ 1370.513345] env[63297]: DEBUG nova.network.neutron [req-d326980f-c98a-451f-914e-62aee2a47890 req-649428b5-4cb5-46b9-8437-043cfe02c2c9 service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Updated VIF entry in instance network info cache for port 1354943e-9dda-4b43-9508-a9535e5627e5. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1370.513906] env[63297]: DEBUG nova.network.neutron [req-d326980f-c98a-451f-914e-62aee2a47890 req-649428b5-4cb5-46b9-8437-043cfe02c2c9 service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Updating instance_info_cache with network_info: [{"id": "1354943e-9dda-4b43-9508-a9535e5627e5", "address": "fa:16:3e:1e:6a:b2", "network": {"id": "924d0fb5-2ab1-41f7-8b87-7613598cc00d", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-719788927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b71913fd12874e10983e1680c3f9dcea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1354943e-9d", "ovs_interfaceid": "1354943e-9dda-4b43-9508-a9535e5627e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.699852] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.700224] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.700383] env[63297]: DEBUG nova.network.neutron [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1370.858747] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.697s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1370.859815] env[63297]: DEBUG nova.compute.manager [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Start building networks asynchronously for 
instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1370.863441] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.416s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.863692] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1370.866455] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.882s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.867233] env[63297]: INFO nova.compute.claims [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1370.901245] env[63297]: INFO nova.scheduler.client.report [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Deleted allocations for instance 2d7b237e-f86d-42b1-ab04-320f0012a2d1 [ 1371.020601] env[63297]: DEBUG oslo_concurrency.lockutils [req-d326980f-c98a-451f-914e-62aee2a47890 req-649428b5-4cb5-46b9-8437-043cfe02c2c9 service nova] Releasing lock "refresh_cache-a57c0638-e14b-4474-a6b4-7184d7e2a0fe" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1371.082926] env[63297]: DEBUG nova.network.neutron [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Successfully updated port: fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1371.243887] env[63297]: WARNING nova.network.neutron [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] 250ab20f-3057-41ed-bb65-926464a4e926 already exists in list: networks containing: ['250ab20f-3057-41ed-bb65-926464a4e926']. 
ignoring it [ 1371.371838] env[63297]: DEBUG nova.compute.utils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1371.379027] env[63297]: DEBUG nova.compute.manager [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1371.379027] env[63297]: DEBUG nova.network.neutron [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1371.412275] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3569c0d7-4c7d-497a-a9e9-6e2fc165b2d2 tempest-ServersTestMultiNic-1146954680 tempest-ServersTestMultiNic-1146954680-project-member] Lock "2d7b237e-f86d-42b1-ab04-320f0012a2d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.451s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.459545] env[63297]: DEBUG nova.policy [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92fc0015269142758d749fda05ac19ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'baf40bf7f33349cb8bb098887d1244ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1371.587845] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "refresh_cache-c4e96403-895c-479d-bfb2-274a87446bf9" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.588033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "refresh_cache-c4e96403-895c-479d-bfb2-274a87446bf9" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.588265] env[63297]: DEBUG nova.network.neutron [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1371.754332] env[63297]: DEBUG nova.network.neutron [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 
tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updating instance_info_cache with network_info: [{"id": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "address": "fa:16:3e:d1:15:40", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14298cd9-89", "ovs_interfaceid": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1c481d81-78fe-48f3-9eb8-9180cb78ecdf", "address": "fa:16:3e:61:f5:2d", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c481d81-78", "ovs_interfaceid": "1c481d81-78fe-48f3-9eb8-9180cb78ecdf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.759441] env[63297]: DEBUG nova.compute.manager [req-53df58a1-d350-4ea2-8e0c-51e14eff962d req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Received event network-vif-plugged-fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1371.759642] env[63297]: DEBUG oslo_concurrency.lockutils [req-53df58a1-d350-4ea2-8e0c-51e14eff962d req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] Acquiring lock "c4e96403-895c-479d-bfb2-274a87446bf9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.759840] env[63297]: DEBUG oslo_concurrency.lockutils [req-53df58a1-d350-4ea2-8e0c-51e14eff962d 
req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] Lock "c4e96403-895c-479d-bfb2-274a87446bf9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.760027] env[63297]: DEBUG oslo_concurrency.lockutils [req-53df58a1-d350-4ea2-8e0c-51e14eff962d req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] Lock "c4e96403-895c-479d-bfb2-274a87446bf9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.760186] env[63297]: DEBUG nova.compute.manager [req-53df58a1-d350-4ea2-8e0c-51e14eff962d req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] No waiting events found dispatching network-vif-plugged-fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1371.760348] env[63297]: WARNING nova.compute.manager [req-53df58a1-d350-4ea2-8e0c-51e14eff962d req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Received unexpected event network-vif-plugged-fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4 for instance with vm_state building and task_state spawning. [ 1371.760548] env[63297]: DEBUG nova.compute.manager [req-53df58a1-d350-4ea2-8e0c-51e14eff962d req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Received event network-changed-fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1371.760714] env[63297]: DEBUG nova.compute.manager [req-53df58a1-d350-4ea2-8e0c-51e14eff962d req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Refreshing instance network info cache due to event network-changed-fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1371.761525] env[63297]: DEBUG oslo_concurrency.lockutils [req-53df58a1-d350-4ea2-8e0c-51e14eff962d req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] Acquiring lock "refresh_cache-c4e96403-895c-479d-bfb2-274a87446bf9" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.876799] env[63297]: DEBUG nova.compute.manager [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1371.949046] env[63297]: DEBUG nova.network.neutron [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Successfully created port: 9ed4fb03-694f-424c-a7f8-b3f512a45b77 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1372.156535] env[63297]: DEBUG nova.network.neutron [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1372.257692] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.258355] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1372.258515] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.259402] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8901d8-3b27-424a-82c1-ab82933ecced {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.281773] env[63297]: DEBUG nova.virt.hardware [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1372.282014] env[63297]: DEBUG nova.virt.hardware [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1372.282177] env[63297]: DEBUG nova.virt.hardware [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 
tempest-AttachInterfacesTestJSON-760324721-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1372.282358] env[63297]: DEBUG nova.virt.hardware [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1372.282500] env[63297]: DEBUG nova.virt.hardware [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1372.282643] env[63297]: DEBUG nova.virt.hardware [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1372.282905] env[63297]: DEBUG nova.virt.hardware [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1372.283035] env[63297]: DEBUG nova.virt.hardware [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1372.283236] env[63297]: DEBUG nova.virt.hardware [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1372.283369] env[63297]: DEBUG nova.virt.hardware [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1372.283532] env[63297]: DEBUG nova.virt.hardware [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1372.290267] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Reconfiguring VM to attach interface {{(pid=63297) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1372.294239] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d1c748e-f9ab-4a8b-8f88-2ac46777e308 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.315380] env[63297]: DEBUG oslo_vmware.api [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1372.315380] env[63297]: value = "task-1697324" [ 1372.315380] env[63297]: _type = "Task" [ 1372.315380] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.325614] env[63297]: DEBUG oslo_vmware.api [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697324, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.421405] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c285bb-f1ac-49e8-b4d7-3a347b299e46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.430709] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2dd2094-7724-4342-a6ca-cdb8901df5ad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.467417] env[63297]: DEBUG nova.network.neutron [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Updating instance_info_cache with network_info: [{"id": "fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4", "address": "fa:16:3e:c2:8b:c8", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfffa061f-0b", "ovs_interfaceid": "fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.471616] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66cf393-1d44-4896-bcd7-633ae3a42de0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.480869] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1ea40f-4c19-43a8-b4c8-98d3966dab02 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.498550] env[63297]: DEBUG nova.compute.provider_tree [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1372.811798] env[63297]: DEBUG nova.compute.manager [req-b7890e60-6d5d-4481-aa4c-0503d95ad5c3 req-fe8cc3bd-6949-4189-8050-ba03ec35f5d2 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Received event network-changed-1c481d81-78fe-48f3-9eb8-9180cb78ecdf {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1372.812041] env[63297]: DEBUG nova.compute.manager [req-b7890e60-6d5d-4481-aa4c-0503d95ad5c3 req-fe8cc3bd-6949-4189-8050-ba03ec35f5d2 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Refreshing instance network info cache due to event network-changed-1c481d81-78fe-48f3-9eb8-9180cb78ecdf. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1372.812387] env[63297]: DEBUG oslo_concurrency.lockutils [req-b7890e60-6d5d-4481-aa4c-0503d95ad5c3 req-fe8cc3bd-6949-4189-8050-ba03ec35f5d2 service nova] Acquiring lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1372.812460] env[63297]: DEBUG oslo_concurrency.lockutils [req-b7890e60-6d5d-4481-aa4c-0503d95ad5c3 req-fe8cc3bd-6949-4189-8050-ba03ec35f5d2 service nova] Acquired lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.812584] env[63297]: DEBUG nova.network.neutron [req-b7890e60-6d5d-4481-aa4c-0503d95ad5c3 req-fe8cc3bd-6949-4189-8050-ba03ec35f5d2 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Refreshing network info cache for port 1c481d81-78fe-48f3-9eb8-9180cb78ecdf {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1372.827065] env[63297]: DEBUG oslo_vmware.api [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697324, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.893470] env[63297]: DEBUG nova.compute.manager [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1372.923383] env[63297]: DEBUG nova.virt.hardware [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1372.924702] env[63297]: DEBUG nova.virt.hardware [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1372.924702] env[63297]: DEBUG nova.virt.hardware [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1372.924789] env[63297]: DEBUG nova.virt.hardware [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1372.924871] env[63297]: DEBUG nova.virt.hardware [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1372.925039] env[63297]: DEBUG nova.virt.hardware [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1372.925269] env[63297]: DEBUG nova.virt.hardware [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1372.925427] env[63297]: DEBUG nova.virt.hardware [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1372.925591] env[63297]: DEBUG 
nova.virt.hardware [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1372.925751] env[63297]: DEBUG nova.virt.hardware [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1372.925925] env[63297]: DEBUG nova.virt.hardware [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1372.927180] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3094804c-3416-4b24-bf17-9bc1cd0ff718 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.937792] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1fad65-18da-4676-b289-482d25fe2317 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.976187] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "refresh_cache-c4e96403-895c-479d-bfb2-274a87446bf9" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.977025] env[63297]: DEBUG nova.compute.manager [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Instance network_info: |[{"id": "fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4", "address": "fa:16:3e:c2:8b:c8", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfffa061f-0b", "ovs_interfaceid": "fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1372.977025] env[63297]: DEBUG oslo_concurrency.lockutils [req-53df58a1-d350-4ea2-8e0c-51e14eff962d 
req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] Acquired lock "refresh_cache-c4e96403-895c-479d-bfb2-274a87446bf9" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.977843] env[63297]: DEBUG nova.network.neutron [req-53df58a1-d350-4ea2-8e0c-51e14eff962d req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Refreshing network info cache for port fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1372.978916] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:8b:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '22390021-1742-415d-b442-811550d09927', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1372.991852] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Creating folder: Project (efc8039a70b34a269d3aed1ecb558b7e). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1372.993536] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4e3a70e-8e08-4167-afb8-207b6cafb066 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.001323] env[63297]: DEBUG nova.scheduler.client.report [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1373.008620] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Created folder: Project (efc8039a70b34a269d3aed1ecb558b7e) in parent group-v353718. [ 1373.008758] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Creating folder: Instances. Parent ref: group-v353863. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1373.008908] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e790b3f-86b9-4fcc-a248-5b324db43e92 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.024423] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Created folder: Instances in parent group-v353863. [ 1373.024694] env[63297]: DEBUG oslo.service.loopingcall [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1373.026095] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1373.026095] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91a75248-44ba-4f35-ae00-3120c5b7b1a9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.048581] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1373.048581] env[63297]: value = "task-1697327" [ 1373.048581] env[63297]: _type = "Task" [ 1373.048581] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.059552] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697327, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.329182] env[63297]: DEBUG oslo_vmware.api [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697324, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.508564] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.509291] env[63297]: DEBUG nova.compute.manager [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1373.513912] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.969s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.515798] env[63297]: INFO nova.compute.claims [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1373.567764] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697327, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.834202] env[63297]: DEBUG oslo_vmware.api [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697324, 'name': ReconfigVM_Task, 'duration_secs': 1.288483} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.834202] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.834202] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Reconfigured VM to attach interface {{(pid=63297) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1373.857486] env[63297]: DEBUG nova.network.neutron [req-b7890e60-6d5d-4481-aa4c-0503d95ad5c3 req-fe8cc3bd-6949-4189-8050-ba03ec35f5d2 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updated VIF entry in instance network info cache for port 1c481d81-78fe-48f3-9eb8-9180cb78ecdf. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1373.857910] env[63297]: DEBUG nova.network.neutron [req-b7890e60-6d5d-4481-aa4c-0503d95ad5c3 req-fe8cc3bd-6949-4189-8050-ba03ec35f5d2 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updating instance_info_cache with network_info: [{"id": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "address": "fa:16:3e:d1:15:40", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14298cd9-89", "ovs_interfaceid": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1c481d81-78fe-48f3-9eb8-9180cb78ecdf", "address": "fa:16:3e:61:f5:2d", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c481d81-78", "ovs_interfaceid": "1c481d81-78fe-48f3-9eb8-9180cb78ecdf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.886527] env[63297]: DEBUG nova.network.neutron [req-53df58a1-d350-4ea2-8e0c-51e14eff962d req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Updated VIF entry in instance network info cache for port fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1373.886914] env[63297]: DEBUG nova.network.neutron [req-53df58a1-d350-4ea2-8e0c-51e14eff962d req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Updating instance_info_cache with network_info: [{"id": "fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4", "address": "fa:16:3e:c2:8b:c8", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfffa061f-0b", "ovs_interfaceid": "fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1374.022027] env[63297]: DEBUG nova.compute.utils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1374.025726] env[63297]: DEBUG nova.compute.manager [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1374.025999] env[63297]: DEBUG nova.network.neutron [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1374.062075] env[63297]: DEBUG nova.network.neutron [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Successfully updated port: 9ed4fb03-694f-424c-a7f8-b3f512a45b77 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1374.075796] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697327, 'name': CreateVM_Task, 'duration_secs': 0.576318} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.077905] env[63297]: DEBUG nova.policy [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43ab498375eb47a3923ac10343c11d34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d5cb4b4799b4b8b99648e718dbc0254', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1374.079386] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1374.080547] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.080754] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.081167] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1374.082291] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c38c4074-535d-4b56-ab42-6d343f1096ca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.088650] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1374.088650] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526bab18-ef59-708f-d28a-55206c706ec8" [ 1374.088650] env[63297]: _type = "Task" [ 1374.088650] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.099139] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526bab18-ef59-708f-d28a-55206c706ec8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.209889] env[63297]: DEBUG nova.compute.manager [req-d2ccc94a-6135-42e2-a3b9-edca18e3782f req-d406de81-ffe0-4387-990f-2c2feb45cac5 service nova] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Received event network-vif-plugged-9ed4fb03-694f-424c-a7f8-b3f512a45b77 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1374.210265] env[63297]: DEBUG oslo_concurrency.lockutils [req-d2ccc94a-6135-42e2-a3b9-edca18e3782f req-d406de81-ffe0-4387-990f-2c2feb45cac5 service nova] Acquiring lock "192c3a5d-3a23-4f78-8dc7-a256b6d9381d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.210811] env[63297]: DEBUG oslo_concurrency.lockutils [req-d2ccc94a-6135-42e2-a3b9-edca18e3782f req-d406de81-ffe0-4387-990f-2c2feb45cac5 service nova] Lock "192c3a5d-3a23-4f78-8dc7-a256b6d9381d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.210811] env[63297]: DEBUG oslo_concurrency.lockutils [req-d2ccc94a-6135-42e2-a3b9-edca18e3782f req-d406de81-ffe0-4387-990f-2c2feb45cac5 service nova] Lock "192c3a5d-3a23-4f78-8dc7-a256b6d9381d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.210973] env[63297]: DEBUG nova.compute.manager [req-d2ccc94a-6135-42e2-a3b9-edca18e3782f req-d406de81-ffe0-4387-990f-2c2feb45cac5 service nova] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] No waiting events found dispatching network-vif-plugged-9ed4fb03-694f-424c-a7f8-b3f512a45b77 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1374.211665] env[63297]: WARNING nova.compute.manager [req-d2ccc94a-6135-42e2-a3b9-edca18e3782f req-d406de81-ffe0-4387-990f-2c2feb45cac5 service nova] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Received unexpected event network-vif-plugged-9ed4fb03-694f-424c-a7f8-b3f512a45b77 for instance with vm_state building and task_state spawning. 
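The recurring "Waiting for the task: (returnval){ value = "task-..." }", "_poll_task ... progress is N%", and "completed successfully" entries throughout this log come from oslo.vmware polling vCenter tasks (ReconfigVM_Task, CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task) until they reach a terminal state. What follows is only a minimal, hedged Python sketch of that generic polling pattern, not the real oslo.vmware implementation; fetch_task_info is a hypothetical callable standing in for the vSphere property query that retrieves task state.

    import time


    def wait_for_vcenter_task(fetch_task_info, task_id, interval=0.5, timeout=300):
        # Sketch of a poll loop; fetch_task_info(task_id) is assumed (hypothetical)
        # to return a dict such as {'state': 'running', 'progress': 14} or
        # {'state': 'success', 'duration_secs': 1.28}.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)   # one call per "_poll_task ... progress is N%" line
            state = info.get('state')
            if state == 'success':
                return info                   # corresponds to "... completed successfully"
            if state == 'error':
                raise RuntimeError('task %s failed: %s' % (task_id, info.get('error')))
            time.sleep(interval)              # back off before polling again
        raise TimeoutError('task %s did not complete within %ss' % (task_id, timeout))

In the log above, each iteration of such a loop surfaces as one "_poll_task" DEBUG line with the current progress, and the final line for a task records its duration_secs once it succeeds.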
[ 1374.344647] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d32e2335-3d53-4acb-b905-f90d6700c16a tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-ef851d71-788d-42f8-a824-5d30a89e957b-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.154s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.361556] env[63297]: DEBUG oslo_concurrency.lockutils [req-b7890e60-6d5d-4481-aa4c-0503d95ad5c3 req-fe8cc3bd-6949-4189-8050-ba03ec35f5d2 service nova] Releasing lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1374.389678] env[63297]: DEBUG oslo_concurrency.lockutils [req-53df58a1-d350-4ea2-8e0c-51e14eff962d req-c67c44bb-b0f8-494a-9054-c747409ffbee service nova] Releasing lock "refresh_cache-c4e96403-895c-479d-bfb2-274a87446bf9" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1374.391068] env[63297]: DEBUG nova.network.neutron [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Successfully created port: a3fa7a52-2cf8-470b-951c-9f0de053dd1a {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1374.526754] env[63297]: DEBUG nova.compute.manager [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1374.570021] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "refresh_cache-192c3a5d-3a23-4f78-8dc7-a256b6d9381d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.570021] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired lock "refresh_cache-192c3a5d-3a23-4f78-8dc7-a256b6d9381d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.570021] env[63297]: DEBUG nova.network.neutron [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1374.604519] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526bab18-ef59-708f-d28a-55206c706ec8, 'name': SearchDatastore_Task, 'duration_secs': 0.037889} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.607591] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1374.607835] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1374.608257] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.609082] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.609082] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1374.609260] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50c1776b-4123-482f-8c50-41684f7b9976 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.622661] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1374.622749] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1374.623650] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f90de11-f90a-4dd2-b781-308e6f459952 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.635646] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1374.635646] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52053e01-2845-e017-184a-c0f309aa8ddd" [ 1374.635646] env[63297]: _type = "Task" [ 1374.635646] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.654698] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52053e01-2845-e017-184a-c0f309aa8ddd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.115489] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090072dd-269e-4a6b-b149-95ebc862d6bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.126319] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7f074c-3e8e-4bac-9d24-326d7c58828e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.130460] env[63297]: DEBUG nova.network.neutron [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1375.170619] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cffae24-90ce-49f1-a82d-2b051882e546 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.184756] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52053e01-2845-e017-184a-c0f309aa8ddd, 'name': SearchDatastore_Task, 'duration_secs': 0.018016} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.185621] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c66331-3a2b-418d-863a-9da0006049ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.191832] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a1cc3c4-3a75-4681-ab30-8c0e15324e4e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.198153] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1375.198153] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524536d5-ee54-6dd2-159e-afd4a42f8c5a" [ 1375.198153] env[63297]: _type = "Task" [ 1375.198153] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.208130] env[63297]: DEBUG nova.compute.provider_tree [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1375.225466] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524536d5-ee54-6dd2-159e-afd4a42f8c5a, 'name': SearchDatastore_Task, 'duration_secs': 0.023678} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.225466] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.225466] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c4e96403-895c-479d-bfb2-274a87446bf9/c4e96403-895c-479d-bfb2-274a87446bf9.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1375.225466] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60868cfa-8db7-472b-a72b-4a2245474d52 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.235595] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1375.235595] env[63297]: value = "task-1697328" [ 1375.235595] env[63297]: _type = "Task" [ 1375.235595] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.245532] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697328, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.383483] env[63297]: DEBUG nova.network.neutron [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Updating instance_info_cache with network_info: [{"id": "9ed4fb03-694f-424c-a7f8-b3f512a45b77", "address": "fa:16:3e:64:77:f8", "network": {"id": "77c68484-41bb-4cce-bb80-50e08cc5af11", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-795079396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf40bf7f33349cb8bb098887d1244ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ed4fb03-69", "ovs_interfaceid": "9ed4fb03-694f-424c-a7f8-b3f512a45b77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.400436] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquiring lock "d2436717-7230-448f-b310-d062b1f11c52" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.400738] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lock "d2436717-7230-448f-b310-d062b1f11c52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.542417] env[63297]: DEBUG nova.compute.manager [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1375.574681] env[63297]: DEBUG nova.virt.hardware [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1375.575058] env[63297]: DEBUG nova.virt.hardware [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1375.575233] env[63297]: DEBUG nova.virt.hardware [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1375.575426] env[63297]: DEBUG nova.virt.hardware [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1375.575641] env[63297]: DEBUG nova.virt.hardware [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1375.575822] env[63297]: DEBUG nova.virt.hardware [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1375.576081] env[63297]: DEBUG nova.virt.hardware [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1375.576260] env[63297]: DEBUG nova.virt.hardware [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1375.576477] env[63297]: DEBUG nova.virt.hardware [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Got 1 
possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1375.576674] env[63297]: DEBUG nova.virt.hardware [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1375.576853] env[63297]: DEBUG nova.virt.hardware [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1375.577850] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c55ed9d-bb9b-488c-8998-a6fb4e96b020 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.589899] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79273a16-5263-4bc6-a2d6-e581948c8bef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.717694] env[63297]: DEBUG nova.scheduler.client.report [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1375.749045] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697328, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509815} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.749045] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c4e96403-895c-479d-bfb2-274a87446bf9/c4e96403-895c-479d-bfb2-274a87446bf9.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1375.749045] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1375.749045] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40ba96d2-10e6-44f8-9465-6f82c2055d18 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.759232] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1375.759232] env[63297]: value = "task-1697329" [ 1375.759232] env[63297]: _type = "Task" [ 1375.759232] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.771380] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697329, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.888623] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Releasing lock "refresh_cache-192c3a5d-3a23-4f78-8dc7-a256b6d9381d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.888623] env[63297]: DEBUG nova.compute.manager [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Instance network_info: |[{"id": "9ed4fb03-694f-424c-a7f8-b3f512a45b77", "address": "fa:16:3e:64:77:f8", "network": {"id": "77c68484-41bb-4cce-bb80-50e08cc5af11", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-795079396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf40bf7f33349cb8bb098887d1244ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ed4fb03-69", "ovs_interfaceid": "9ed4fb03-694f-424c-a7f8-b3f512a45b77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1375.888623] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:77:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ed4fb03-694f-424c-a7f8-b3f512a45b77', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1375.896677] env[63297]: DEBUG oslo.service.loopingcall [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1375.896928] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1375.897167] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4297e71-a3f4-4493-a616-006478190a59 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.926312] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1375.926312] env[63297]: value = "task-1697330" [ 1375.926312] env[63297]: _type = "Task" [ 1375.926312] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.939113] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697330, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.972710] env[63297]: DEBUG nova.compute.manager [req-de1b06e3-7d04-4b04-9aa1-229419e5c55b req-5a8890ef-4eb1-4b29-b170-93f476a41e16 service nova] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Received event network-vif-plugged-a3fa7a52-2cf8-470b-951c-9f0de053dd1a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1375.972710] env[63297]: DEBUG oslo_concurrency.lockutils [req-de1b06e3-7d04-4b04-9aa1-229419e5c55b req-5a8890ef-4eb1-4b29-b170-93f476a41e16 service nova] Acquiring lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.972710] env[63297]: DEBUG oslo_concurrency.lockutils [req-de1b06e3-7d04-4b04-9aa1-229419e5c55b req-5a8890ef-4eb1-4b29-b170-93f476a41e16 service nova] Lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.972710] env[63297]: DEBUG oslo_concurrency.lockutils [req-de1b06e3-7d04-4b04-9aa1-229419e5c55b req-5a8890ef-4eb1-4b29-b170-93f476a41e16 service nova] Lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.972710] env[63297]: DEBUG nova.compute.manager [req-de1b06e3-7d04-4b04-9aa1-229419e5c55b req-5a8890ef-4eb1-4b29-b170-93f476a41e16 service nova] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] No waiting events found dispatching network-vif-plugged-a3fa7a52-2cf8-470b-951c-9f0de053dd1a {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1375.972710] env[63297]: WARNING nova.compute.manager [req-de1b06e3-7d04-4b04-9aa1-229419e5c55b req-5a8890ef-4eb1-4b29-b170-93f476a41e16 service nova] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Received unexpected event network-vif-plugged-a3fa7a52-2cf8-470b-951c-9f0de053dd1a for instance with vm_state building and task_state spawning. 
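[editor's note] The "Waiting for the task ... CreateVM_Task ... progress is 0%" sequence above is produced by the fixed-interval polling loop behind the wait_for_task/_poll_task call sites in oslo_vmware/api.py. A minimal sketch of that polling pattern, using oslo.service directly, follows; fake_task_progress and its counter are illustrative stand-ins, not Nova or oslo.vmware code.

from oslo_service import loopingcall

_progress = {"value": 0}

def fake_task_progress():
    # Illustrative stand-in for reading a vCenter task's TaskInfo/progress.
    _progress["value"] += 25
    print("progress is %d%%" % _progress["value"])
    if _progress["value"] >= 100:
        # Raising LoopingCallDone stops the loop; its value becomes wait()'s result.
        raise loopingcall.LoopingCallDone("completed successfully")

# Same shape as oslo.vmware's wait_for_task: start a fixed-interval poller and
# block on it until the task reports completion.
timer = loopingcall.FixedIntervalLoopingCall(fake_task_progress)
print(timer.start(interval=0.5).wait())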
[ 1376.027882] env[63297]: DEBUG nova.network.neutron [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Successfully updated port: a3fa7a52-2cf8-470b-951c-9f0de053dd1a {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1376.226021] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.226021] env[63297]: DEBUG nova.compute.manager [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1376.227279] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.311s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.228884] env[63297]: INFO nova.compute.claims [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1376.272827] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697329, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0709} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.273208] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1376.274030] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56d6a23-931d-4846-8008-56678fd8db5f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.281392] env[63297]: DEBUG nova.compute.manager [req-46a26e57-05de-4208-b2da-2bf81e80cc39 req-1f2d21ea-4f59-4b25-84a1-52c6a3f0ba42 service nova] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Received event network-changed-9ed4fb03-694f-424c-a7f8-b3f512a45b77 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1376.281650] env[63297]: DEBUG nova.compute.manager [req-46a26e57-05de-4208-b2da-2bf81e80cc39 req-1f2d21ea-4f59-4b25-84a1-52c6a3f0ba42 service nova] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Refreshing instance network info cache due to event network-changed-9ed4fb03-694f-424c-a7f8-b3f512a45b77. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1376.281926] env[63297]: DEBUG oslo_concurrency.lockutils [req-46a26e57-05de-4208-b2da-2bf81e80cc39 req-1f2d21ea-4f59-4b25-84a1-52c6a3f0ba42 service nova] Acquiring lock "refresh_cache-192c3a5d-3a23-4f78-8dc7-a256b6d9381d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.282148] env[63297]: DEBUG oslo_concurrency.lockutils [req-46a26e57-05de-4208-b2da-2bf81e80cc39 req-1f2d21ea-4f59-4b25-84a1-52c6a3f0ba42 service nova] Acquired lock "refresh_cache-192c3a5d-3a23-4f78-8dc7-a256b6d9381d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.282553] env[63297]: DEBUG nova.network.neutron [req-46a26e57-05de-4208-b2da-2bf81e80cc39 req-1f2d21ea-4f59-4b25-84a1-52c6a3f0ba42 service nova] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Refreshing network info cache for port 9ed4fb03-694f-424c-a7f8-b3f512a45b77 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1376.312205] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] c4e96403-895c-479d-bfb2-274a87446bf9/c4e96403-895c-479d-bfb2-274a87446bf9.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1376.314495] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ce283be-163b-4f0e-b4bc-b74503d3fcc8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.341439] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1376.341439] env[63297]: value = 
"task-1697331" [ 1376.341439] env[63297]: _type = "Task" [ 1376.341439] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.351414] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697331, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.437464] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697330, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.531714] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "refresh_cache-6ce88b93-aa42-4f34-81fa-6c09c23ace81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.531714] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "refresh_cache-6ce88b93-aa42-4f34-81fa-6c09c23ace81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.531714] env[63297]: DEBUG nova.network.neutron [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1376.738701] env[63297]: DEBUG nova.compute.utils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1376.740206] env[63297]: DEBUG nova.compute.manager [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1376.740338] env[63297]: DEBUG nova.network.neutron [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1376.759385] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "interface-ef851d71-788d-42f8-a824-5d30a89e957b-1c481d81-78fe-48f3-9eb8-9180cb78ecdf" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.759653] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-ef851d71-788d-42f8-a824-5d30a89e957b-1c481d81-78fe-48f3-9eb8-9180cb78ecdf" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.831187] env[63297]: DEBUG nova.policy [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20a91144677b4efba8ab91acd53d1c04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c33733e0599840618625ecb3e6bb6029', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1376.852627] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697331, 'name': ReconfigVM_Task, 'duration_secs': 0.361709} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.855260] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Reconfigured VM instance instance-00000032 to attach disk [datastore1] c4e96403-895c-479d-bfb2-274a87446bf9/c4e96403-895c-479d-bfb2-274a87446bf9.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1376.856699] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01e8d617-df41-46ae-9034-9f0f32144c8e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.865862] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1376.865862] env[63297]: value = "task-1697332" [ 1376.865862] env[63297]: _type = "Task" [ 1376.865862] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.876639] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697332, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.937794] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697330, 'name': CreateVM_Task, 'duration_secs': 0.685872} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.939990] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1376.941406] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.941664] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.941961] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1376.942597] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-540941da-d614-4be3-86f3-280ad2512c1b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.948799] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1376.948799] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]525231d3-35b5-67c8-e958-c9e5f17f8b60" [ 1376.948799] env[63297]: _type = "Task" [ 1376.948799] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.961569] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525231d3-35b5-67c8-e958-c9e5f17f8b60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.099072] env[63297]: DEBUG nova.network.neutron [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1377.142743] env[63297]: DEBUG nova.network.neutron [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Successfully created port: 2d2fd3e6-5139-447f-b482-8492601c35f3 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1377.244597] env[63297]: DEBUG nova.compute.manager [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1377.258664] env[63297]: DEBUG nova.network.neutron [req-46a26e57-05de-4208-b2da-2bf81e80cc39 req-1f2d21ea-4f59-4b25-84a1-52c6a3f0ba42 service nova] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Updated VIF entry in instance network info cache for port 9ed4fb03-694f-424c-a7f8-b3f512a45b77. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1377.259034] env[63297]: DEBUG nova.network.neutron [req-46a26e57-05de-4208-b2da-2bf81e80cc39 req-1f2d21ea-4f59-4b25-84a1-52c6a3f0ba42 service nova] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Updating instance_info_cache with network_info: [{"id": "9ed4fb03-694f-424c-a7f8-b3f512a45b77", "address": "fa:16:3e:64:77:f8", "network": {"id": "77c68484-41bb-4cce-bb80-50e08cc5af11", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-795079396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf40bf7f33349cb8bb098887d1244ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ed4fb03-69", "ovs_interfaceid": "9ed4fb03-694f-424c-a7f8-b3f512a45b77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.272100] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.272100] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.272100] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6cbbca-7193-4e53-97fb-fbd4dd22a1c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.299825] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00378de1-7b31-4abd-8b2d-81713023a57c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.335809] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Reconfiguring VM to detach interface {{(pid=63297) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1377.339118] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db7e1eb6-311d-436f-8952-60c45f85bc20 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.360551] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1377.360551] env[63297]: value = "task-1697333" [ 1377.360551] env[63297]: _type = "Task" [ 1377.360551] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.376029] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.382249] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697332, 'name': Rename_Task, 'duration_secs': 0.313408} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.382577] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1377.383100] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35d418a7-31df-47bb-b826-c94e1f800d5c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.392036] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1377.392036] env[63297]: value = "task-1697334" [ 1377.392036] env[63297]: _type = "Task" [ 1377.392036] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.407745] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697334, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.468022] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525231d3-35b5-67c8-e958-c9e5f17f8b60, 'name': SearchDatastore_Task, 'duration_secs': 0.009697} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.468022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.468022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.468341] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.468565] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1377.468774] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.468912] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
1377.469107] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1377.469816] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b73d7ca1-da11-48ea-a7ae-7abc826ea7b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.480400] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1377.480495] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1377.484313] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2871a5e3-f83c-4b5d-a6d0-4307d547c5e2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.491684] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1377.491684] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f69db1-cd60-07d2-fe3a-a7c55fceb6e3" [ 1377.491684] env[63297]: _type = "Task" [ 1377.491684] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.497837] env[63297]: DEBUG nova.network.neutron [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Updating instance_info_cache with network_info: [{"id": "a3fa7a52-2cf8-470b-951c-9f0de053dd1a", "address": "fa:16:3e:d5:fe:77", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3fa7a52-2c", "ovs_interfaceid": "a3fa7a52-2cf8-470b-951c-9f0de053dd1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.502461] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f69db1-cd60-07d2-fe3a-a7c55fceb6e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.763699] env[63297]: DEBUG oslo_concurrency.lockutils [req-46a26e57-05de-4208-b2da-2bf81e80cc39 req-1f2d21ea-4f59-4b25-84a1-52c6a3f0ba42 service nova] Releasing lock "refresh_cache-192c3a5d-3a23-4f78-8dc7-a256b6d9381d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.873057] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.881617] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5445442d-4baf-44ee-8b39-31347e7c7e03 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.889724] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8c7041-a5cb-400c-9347-a1d2fb7de834 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.923273] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98fa4302-d2c3-4667-89de-e308c13c6bce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.928869] env[63297]: DEBUG oslo_vmware.api [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697334, 'name': PowerOnVM_Task, 'duration_secs': 0.472488} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.929480] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1377.929683] env[63297]: INFO nova.compute.manager [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Took 7.75 seconds to spawn the instance on the hypervisor. 
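[editor's note] The "Acquiring lock / Acquired lock / Releasing lock" lines (refresh_cache-*, the datastore image-cache paths) and the "Lock ... acquired by ... waited Ns / released ... held Ns" lines (compute_resources, instance UUIDs) come from the two oslo.concurrency idioms sketched below. The lock names are copied from the log purely for illustration and the bodies are placeholders.

from oslo_concurrency import lockutils

# Context-manager form: logs "Acquiring lock ...", "Acquired lock ...",
# "Releasing lock ..." from lockutils.lock().
with lockutils.lock("refresh_cache-6ce88b93-aa42-4f34-81fa-6c09c23ace81"):
    pass  # e.g. read or refresh the cached network_info while holding the lock

# Decorator form: logs "Lock ... acquired by ... :: waited Ns" and
# "Lock ... released ... :: held Ns" from the synchronized wrapper.
@lockutils.synchronized("compute_resources")
def claim_resources():
    pass  # placeholder for the resource tracker's instance_claim work

claim_resources()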
[ 1377.929858] env[63297]: DEBUG nova.compute.manager [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1377.930957] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068d4ec5-151d-4b74-91fa-1412ce550f43 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.936820] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e41b41c-6b71-413a-b569-8467ac37a875 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.954430] env[63297]: DEBUG nova.compute.provider_tree [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1377.971211] env[63297]: DEBUG nova.compute.utils [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1378.003806] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "refresh_cache-6ce88b93-aa42-4f34-81fa-6c09c23ace81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.004145] env[63297]: DEBUG nova.compute.manager [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Instance network_info: |[{"id": "a3fa7a52-2cf8-470b-951c-9f0de053dd1a", "address": "fa:16:3e:d5:fe:77", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3fa7a52-2c", "ovs_interfaceid": "a3fa7a52-2cf8-470b-951c-9f0de053dd1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1378.004551] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 
tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f69db1-cd60-07d2-fe3a-a7c55fceb6e3, 'name': SearchDatastore_Task, 'duration_secs': 0.009956} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.004980] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:fe:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3fa7a52-2cf8-470b-951c-9f0de053dd1a', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1378.012607] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Creating folder: Project (2d5cb4b4799b4b8b99648e718dbc0254). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1378.013734] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f58c5eda-d567-471f-9aee-44794f65a28d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.015476] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd44d75f-94af-4339-905c-08590684fad9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.024093] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1378.024093] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52afb7e7-f562-0880-9046-333fa96fc83f" [ 1378.024093] env[63297]: _type = "Task" [ 1378.024093] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.026794] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Created folder: Project (2d5cb4b4799b4b8b99648e718dbc0254) in parent group-v353718. [ 1378.026990] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Creating folder: Instances. Parent ref: group-v353867. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1378.027580] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-115007d1-5f84-448e-bafe-69dbbdc83317 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.034735] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52afb7e7-f562-0880-9046-333fa96fc83f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.037356] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Created folder: Instances in parent group-v353867. [ 1378.037614] env[63297]: DEBUG oslo.service.loopingcall [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1378.037912] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1378.038176] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d2404b4-4775-429b-b68b-96c4dda72eb3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.059161] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1378.059161] env[63297]: value = "task-1697337" [ 1378.059161] env[63297]: _type = "Task" [ 1378.059161] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.069199] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697337, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.257519] env[63297]: DEBUG nova.compute.manager [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1378.287887] env[63297]: DEBUG nova.virt.hardware [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1378.288147] env[63297]: DEBUG nova.virt.hardware [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1378.288304] env[63297]: DEBUG nova.virt.hardware [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1378.288483] env[63297]: DEBUG nova.virt.hardware [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1378.288625] env[63297]: DEBUG nova.virt.hardware [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1378.288772] env[63297]: DEBUG nova.virt.hardware [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1378.288975] env[63297]: DEBUG nova.virt.hardware [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1378.289142] env[63297]: DEBUG nova.virt.hardware [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1378.289374] env[63297]: DEBUG nova.virt.hardware [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 
tempest-ServersTestJSON-1972465365-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1378.289543] env[63297]: DEBUG nova.virt.hardware [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1378.289719] env[63297]: DEBUG nova.virt.hardware [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1378.291631] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a557f4f-6285-4283-8a5a-8e19a64dce78 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.303826] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90929aa6-26cd-4f22-9530-173bf9ce3a67 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.372773] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.451667] env[63297]: INFO nova.compute.manager [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Took 43.52 seconds to build instance. 
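The repeated "Waiting for the task: (returnval){ ... }" and "Task: {'id': ..., 'name': ...} progress is N%" records in this log come from the poll loop nova-compute runs around each vCenter task (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, and so on) through oslo_vmware's wait_for_task helper. A minimal sketch of that polling pattern is shown below in plain Python; get_task_info, VCenterTaskError, and the field names are hypothetical stand-ins used for illustration, not the actual oslo_vmware API.

    import time

    class VCenterTaskError(Exception):
        """Raised when a polled task reports an error state (hypothetical)."""

    def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
        """Poll a task until it succeeds, fails, or times out.

        get_task_info(task_id) is assumed to return a dict with keys
        'state' ('queued', 'running', 'success', 'error'), 'progress' (int),
        and optionally 'error' (str), mirroring the fields echoed in the log.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)
            if info['state'] == 'success':
                return info  # task finished; caller can read duration_secs etc.
            if info['state'] == 'error':
                raise VCenterTaskError(info.get('error', 'unknown error'))
            # Still queued/running: report progress (the "progress is N%" lines) and retry.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)
        raise TimeoutError(f"task {task_id} did not complete within {timeout}s")

In the real driver the equivalent loop runs inside an oslo.service looping call (visible above as "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return"), and the completed-task record then carries the measured duration_secs.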
[ 1378.457549] env[63297]: DEBUG nova.scheduler.client.report [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1378.474302] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.529793] env[63297]: DEBUG nova.compute.manager [req-6cc91ee2-2501-4f26-9089-3f9666649c10 req-8a64a584-1041-45fe-ba98-ab64081b8db4 service nova] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Received event network-changed-a3fa7a52-2cf8-470b-951c-9f0de053dd1a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1378.530076] env[63297]: DEBUG nova.compute.manager [req-6cc91ee2-2501-4f26-9089-3f9666649c10 req-8a64a584-1041-45fe-ba98-ab64081b8db4 service nova] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Refreshing instance network info cache due to event network-changed-a3fa7a52-2cf8-470b-951c-9f0de053dd1a. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1378.530400] env[63297]: DEBUG oslo_concurrency.lockutils [req-6cc91ee2-2501-4f26-9089-3f9666649c10 req-8a64a584-1041-45fe-ba98-ab64081b8db4 service nova] Acquiring lock "refresh_cache-6ce88b93-aa42-4f34-81fa-6c09c23ace81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.530544] env[63297]: DEBUG oslo_concurrency.lockutils [req-6cc91ee2-2501-4f26-9089-3f9666649c10 req-8a64a584-1041-45fe-ba98-ab64081b8db4 service nova] Acquired lock "refresh_cache-6ce88b93-aa42-4f34-81fa-6c09c23ace81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.530793] env[63297]: DEBUG nova.network.neutron [req-6cc91ee2-2501-4f26-9089-3f9666649c10 req-8a64a584-1041-45fe-ba98-ab64081b8db4 service nova] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Refreshing network info cache for port a3fa7a52-2cf8-470b-951c-9f0de053dd1a {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1378.535828] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52afb7e7-f562-0880-9046-333fa96fc83f, 'name': SearchDatastore_Task, 'duration_secs': 0.01037} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.536272] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.536655] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 192c3a5d-3a23-4f78-8dc7-a256b6d9381d/192c3a5d-3a23-4f78-8dc7-a256b6d9381d.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1378.536917] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-024c0e61-73dc-4775-b721-2be0d00020ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.545981] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1378.545981] env[63297]: value = "task-1697338" [ 1378.545981] env[63297]: _type = "Task" [ 1378.545981] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.555080] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697338, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.568996] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697337, 'name': CreateVM_Task, 'duration_secs': 0.370306} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.569169] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1378.570127] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.570301] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.570624] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1378.570878] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e47a389-bceb-405f-9b69-e878b1fedc64 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.577533] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1378.577533] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5247f2e3-efd0-2e65-b989-7f080515a662" [ 1378.577533] env[63297]: _type = "Task" [ 1378.577533] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.586198] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5247f2e3-efd0-2e65-b989-7f080515a662, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.772588] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Acquiring lock "ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.773221] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Lock "ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.773531] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Acquiring lock "ef57101e-1d8a-4ad5-ad68-cad2dbea33d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.773780] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Lock "ef57101e-1d8a-4ad5-ad68-cad2dbea33d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.774148] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Lock "ef57101e-1d8a-4ad5-ad68-cad2dbea33d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.776760] env[63297]: INFO nova.compute.manager [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Terminating instance [ 1378.778806] env[63297]: DEBUG nova.compute.manager [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1378.779205] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1378.780021] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5059c158-dfb2-48ec-8698-c3d6d268c352 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.790144] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1378.790517] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a17c6be3-c03c-4a19-9248-d944cea9bd55 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.800023] env[63297]: DEBUG oslo_vmware.api [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Waiting for the task: (returnval){ [ 1378.800023] env[63297]: value = "task-1697339" [ 1378.800023] env[63297]: _type = "Task" [ 1378.800023] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.814104] env[63297]: DEBUG oslo_vmware.api [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1697339, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.849202] env[63297]: DEBUG nova.network.neutron [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Successfully updated port: 2d2fd3e6-5139-447f-b482-8492601c35f3 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1378.876220] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.953953] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4b98e193-d2ba-47b3-9944-823e057c6351 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "c4e96403-895c-479d-bfb2-274a87446bf9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.697s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.962432] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.735s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.962965] env[63297]: DEBUG nova.compute.manager [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1378.969667] env[63297]: DEBUG oslo_concurrency.lockutils [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.272s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.969887] env[63297]: DEBUG oslo_concurrency.lockutils [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.974555] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.263s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.976815] env[63297]: INFO nova.compute.claims [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1379.015864] env[63297]: INFO nova.scheduler.client.report [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Deleted allocations for instance d15a7e98-755b-4c5c-ba34-dc5fc3f8846d [ 1379.057188] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697338, 'name': 
CopyVirtualDisk_Task, 'duration_secs': 0.497328} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.057188] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 192c3a5d-3a23-4f78-8dc7-a256b6d9381d/192c3a5d-3a23-4f78-8dc7-a256b6d9381d.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1379.058190] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1379.058190] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3db416ee-38f7-4529-bc00-103f624e0835 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.068273] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1379.068273] env[63297]: value = "task-1697340" [ 1379.068273] env[63297]: _type = "Task" [ 1379.068273] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.081905] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697340, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.100266] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5247f2e3-efd0-2e65-b989-7f080515a662, 'name': SearchDatastore_Task, 'duration_secs': 0.009085} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.100632] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.100968] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1379.101323] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.101514] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.101741] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1379.102065] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c14a0b79-272e-4bb2-a56d-54db977c6cdc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.112526] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1379.113574] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1379.113918] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-514690e5-086b-4455-bffa-7bbfd686df9b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.120656] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1379.120656] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a8c796-4d05-0ab4-3518-625f2967f1ce" [ 1379.120656] env[63297]: _type = "Task" [ 1379.120656] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.131581] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a8c796-4d05-0ab4-3518-625f2967f1ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.310792] env[63297]: DEBUG oslo_vmware.api [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1697339, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.334267] env[63297]: DEBUG nova.network.neutron [req-6cc91ee2-2501-4f26-9089-3f9666649c10 req-8a64a584-1041-45fe-ba98-ab64081b8db4 service nova] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Updated VIF entry in instance network info cache for port a3fa7a52-2cf8-470b-951c-9f0de053dd1a. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1379.334654] env[63297]: DEBUG nova.network.neutron [req-6cc91ee2-2501-4f26-9089-3f9666649c10 req-8a64a584-1041-45fe-ba98-ab64081b8db4 service nova] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Updating instance_info_cache with network_info: [{"id": "a3fa7a52-2cf8-470b-951c-9f0de053dd1a", "address": "fa:16:3e:d5:fe:77", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3fa7a52-2c", "ovs_interfaceid": "a3fa7a52-2cf8-470b-951c-9f0de053dd1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.352109] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "refresh_cache-765f3232-f3f9-4d9b-92f2-fb6603f2a90a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.352260] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "refresh_cache-765f3232-f3f9-4d9b-92f2-fb6603f2a90a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.352406] env[63297]: DEBUG nova.network.neutron [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1379.377087] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.458171] env[63297]: DEBUG nova.compute.manager [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1379.485465] env[63297]: DEBUG nova.compute.utils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1379.490980] env[63297]: DEBUG nova.compute.manager [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1379.491140] env[63297]: DEBUG nova.network.neutron [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1379.523820] env[63297]: DEBUG oslo_concurrency.lockutils [None req-38dc5c79-bd5b-4381-a93b-1908b3c3abf4 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "d15a7e98-755b-4c5c-ba34-dc5fc3f8846d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.786s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.566537] env[63297]: DEBUG nova.policy [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8767f029ef2847acb8679c8dda841e61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de74a055696b4dd69b88d08b52d327d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1379.584499] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697340, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095092} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.584672] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1379.585416] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca2ae62-d939-4eb8-84d3-a12e54d6bce4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.600640] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.601165] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.601165] env[63297]: INFO nova.compute.manager [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Attaching volume b0c8969b-afca-4185-99f2-ddbfd23d0d4e to /dev/sdb [ 1379.614388] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 192c3a5d-3a23-4f78-8dc7-a256b6d9381d/192c3a5d-3a23-4f78-8dc7-a256b6d9381d.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1379.617929] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16a6b4e8-4147-4ffa-ab7c-825c6dfcae39 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.657465] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a8c796-4d05-0ab4-3518-625f2967f1ce, 'name': SearchDatastore_Task, 'duration_secs': 0.010839} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.660775] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1379.660775] env[63297]: value = "task-1697341" [ 1379.660775] env[63297]: _type = "Task" [ 1379.660775] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.661153] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1dd068a-b3ac-424e-ad73-d95b6b0ffd6e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.675589] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697341, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.676394] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1379.676394] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5207abcd-b890-f8b2-ffc4-f8223e657929" [ 1379.676394] env[63297]: _type = "Task" [ 1379.676394] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.680752] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5146b236-d68d-4577-a739-bd216d39d5d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.689608] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5207abcd-b890-f8b2-ffc4-f8223e657929, 'name': SearchDatastore_Task, 'duration_secs': 0.01253} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.691471] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.691755] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6ce88b93-aa42-4f34-81fa-6c09c23ace81/6ce88b93-aa42-4f34-81fa-6c09c23ace81.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1379.692100] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87475ac8-91f8-447e-9a27-07ef72018c32 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.694512] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f903f3d-78c3-4e1d-9220-1a08baad6df9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.704360] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1379.704360] env[63297]: value = "task-1697342" [ 1379.704360] env[63297]: _type = "Task" [ 1379.704360] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.712414] env[63297]: DEBUG nova.virt.block_device [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Updating existing volume attachment record: 4052c256-d7a2-4ede-aae7-8ed7b3c7972d {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1379.721465] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.812209] env[63297]: DEBUG oslo_vmware.api [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1697339, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.838008] env[63297]: DEBUG oslo_concurrency.lockutils [req-6cc91ee2-2501-4f26-9089-3f9666649c10 req-8a64a584-1041-45fe-ba98-ab64081b8db4 service nova] Releasing lock "refresh_cache-6ce88b93-aa42-4f34-81fa-6c09c23ace81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.875522] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.915219] env[63297]: DEBUG nova.network.neutron [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1379.984154] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.989504] env[63297]: DEBUG nova.compute.manager [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1379.996695] env[63297]: DEBUG nova.network.neutron [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Successfully created port: 2b5c1a4a-c8b7-459a-b120-b193ed9337d5 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1380.146502] env[63297]: DEBUG oslo_concurrency.lockutils [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "c4e96403-895c-479d-bfb2-274a87446bf9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.146861] env[63297]: DEBUG oslo_concurrency.lockutils [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "c4e96403-895c-479d-bfb2-274a87446bf9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.147126] env[63297]: DEBUG oslo_concurrency.lockutils [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "c4e96403-895c-479d-bfb2-274a87446bf9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.147335] env[63297]: DEBUG oslo_concurrency.lockutils [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "c4e96403-895c-479d-bfb2-274a87446bf9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.147615] env[63297]: DEBUG oslo_concurrency.lockutils [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "c4e96403-895c-479d-bfb2-274a87446bf9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.150319] env[63297]: INFO nova.compute.manager [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Terminating instance [ 1380.152638] env[63297]: DEBUG nova.compute.manager [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1380.152889] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1380.158443] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7b7553-b833-4355-a6e6-72bd873cedc8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.182965] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1380.183782] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8dafaab3-7ebf-4d56-bc96-446e99fd0268 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.190448] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697341, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.200779] env[63297]: DEBUG oslo_vmware.api [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1380.200779] env[63297]: value = "task-1697346" [ 1380.200779] env[63297]: _type = "Task" [ 1380.200779] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.239801] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697342, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.239926] env[63297]: DEBUG oslo_vmware.api [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697346, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.318611] env[63297]: DEBUG oslo_vmware.api [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1697339, 'name': PowerOffVM_Task, 'duration_secs': 1.369606} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.323981] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1380.323981] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1380.324539] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32202bb6-9a73-4afc-af0f-cbfb09dc1a1e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.377214] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.451369] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1380.452078] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1380.452497] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Deleting the datastore file [datastore1] ef57101e-1d8a-4ad5-ad68-cad2dbea33d1 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1380.453346] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4fe0cdb-b168-49b0-9786-012cc79c5e24 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.464678] env[63297]: DEBUG oslo_vmware.api [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Waiting for the task: (returnval){ [ 1380.464678] env[63297]: value = "task-1697348" [ 1380.464678] env[63297]: _type = "Task" [ 1380.464678] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.477680] env[63297]: DEBUG oslo_vmware.api [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1697348, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.585294] env[63297]: DEBUG nova.network.neutron [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Updating instance_info_cache with network_info: [{"id": "2d2fd3e6-5139-447f-b482-8492601c35f3", "address": "fa:16:3e:51:68:d4", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d2fd3e6-51", "ovs_interfaceid": "2d2fd3e6-5139-447f-b482-8492601c35f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.686664] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "b5d34058-fa3e-4806-97e5-638bbbffaeb8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.686909] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "b5d34058-fa3e-4806-97e5-638bbbffaeb8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.687153] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "b5d34058-fa3e-4806-97e5-638bbbffaeb8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.687336] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "b5d34058-fa3e-4806-97e5-638bbbffaeb8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.687499] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "b5d34058-fa3e-4806-97e5-638bbbffaeb8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.689238] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697341, 'name': ReconfigVM_Task, 'duration_secs': 0.872024} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.690669] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 192c3a5d-3a23-4f78-8dc7-a256b6d9381d/192c3a5d-3a23-4f78-8dc7-a256b6d9381d.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1380.694801] env[63297]: INFO nova.compute.manager [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Terminating instance [ 1380.696209] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab480203-c2b2-43d7-8b95-e35e67684a40 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.698252] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "e7fae121-174f-4955-a185-b3f92c6ab110" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.698461] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "e7fae121-174f-4955-a185-b3f92c6ab110" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.699453] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea23dfd-2652-4f88-bd27-fee2d5d170a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.702244] env[63297]: DEBUG nova.compute.manager [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: 
b5d34058-fa3e-4806-97e5-638bbbffaeb8] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1380.702435] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1380.703447] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343db2ab-21fe-4e9b-a683-307c606f76f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.715487] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8648706-a82a-4846-853f-493fd283cba7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.719731] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1380.719731] env[63297]: value = "task-1697349" [ 1380.719731] env[63297]: _type = "Task" [ 1380.719731] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.729399] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1380.729674] env[63297]: DEBUG oslo_vmware.api [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697346, 'name': PowerOffVM_Task, 'duration_secs': 0.298775} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.736611] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-259a596e-a160-4f6f-93f3-4319bb222380 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.738284] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1380.738487] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1380.765355] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f055a26a-a11b-4e9f-9129-a51a41571a87 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.770496] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d02687-5ab0-49fe-a8fe-0b1469ddbdcc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.782352] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697349, 'name': Rename_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.782699] env[63297]: DEBUG oslo_vmware.api [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1380.782699] env[63297]: value = "task-1697350" [ 1380.782699] env[63297]: _type = "Task" [ 1380.782699] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.782915] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697342, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.74436} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.784221] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6ce88b93-aa42-4f34-81fa-6c09c23ace81/6ce88b93-aa42-4f34-81fa-6c09c23ace81.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1380.784672] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1380.788905] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-99454552-9669-4a5d-98bd-255e291c57bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.801243] env[63297]: DEBUG oslo_vmware.api [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.804904] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1380.804904] env[63297]: value = "task-1697352" [ 1380.804904] env[63297]: _type = "Task" [ 1380.804904] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.806200] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072cf6b3-585e-42de-bcab-edbd868e2131 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.821743] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697352, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.829621] env[63297]: DEBUG nova.compute.provider_tree [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1380.877160] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.879095] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1380.879238] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1380.879402] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleting the datastore file [datastore1] c4e96403-895c-479d-bfb2-274a87446bf9 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1380.879698] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6753b17a-bd47-40b3-96a5-30297aed9f20 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.887589] env[63297]: DEBUG oslo_vmware.api [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1380.887589] env[63297]: value = "task-1697353" [ 1380.887589] env[63297]: _type = "Task" [ 1380.887589] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.899344] env[63297]: DEBUG oslo_vmware.api [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697353, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.974468] env[63297]: DEBUG oslo_vmware.api [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Task: {'id': task-1697348, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.369304} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.974742] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1380.974950] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1380.975166] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1380.975345] env[63297]: INFO nova.compute.manager [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Took 2.20 seconds to destroy the instance on the hypervisor. [ 1380.975698] env[63297]: DEBUG oslo.service.loopingcall [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1380.975797] env[63297]: DEBUG nova.compute.manager [-] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1380.975881] env[63297]: DEBUG nova.network.neutron [-] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1381.008123] env[63297]: DEBUG nova.compute.manager [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1381.043783] env[63297]: DEBUG nova.virt.hardware [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1381.044339] env[63297]: DEBUG nova.virt.hardware [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1381.044420] env[63297]: DEBUG nova.virt.hardware [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1381.045075] env[63297]: DEBUG nova.virt.hardware [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1381.045075] env[63297]: DEBUG nova.virt.hardware [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1381.045075] env[63297]: DEBUG nova.virt.hardware [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1381.045298] env[63297]: DEBUG nova.virt.hardware [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1381.045405] env[63297]: DEBUG nova.virt.hardware [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1381.045629] env[63297]: DEBUG nova.virt.hardware [None 
req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1381.045857] env[63297]: DEBUG nova.virt.hardware [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1381.046049] env[63297]: DEBUG nova.virt.hardware [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1381.047073] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d0645a-1c55-4656-836e-eca0d93238b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.059248] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70cf91f3-e6cd-4a3c-b631-fe0638e6a63f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.088022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "refresh_cache-765f3232-f3f9-4d9b-92f2-fb6603f2a90a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.088594] env[63297]: DEBUG nova.compute.manager [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Instance network_info: |[{"id": "2d2fd3e6-5139-447f-b482-8492601c35f3", "address": "fa:16:3e:51:68:d4", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d2fd3e6-51", "ovs_interfaceid": "2d2fd3e6-5139-447f-b482-8492601c35f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1381.088950] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 
tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:68:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d2fd3e6-5139-447f-b482-8492601c35f3', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1381.096985] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Creating folder: Project (c33733e0599840618625ecb3e6bb6029). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1381.097411] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-720386f6-f2ef-4d83-b857-eed693974cad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.112779] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Created folder: Project (c33733e0599840618625ecb3e6bb6029) in parent group-v353718. [ 1381.112779] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Creating folder: Instances. Parent ref: group-v353872. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1381.112779] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-424b302b-e594-489c-acc6-c5e2a623408c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.126082] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Created folder: Instances in parent group-v353872. [ 1381.126368] env[63297]: DEBUG oslo.service.loopingcall [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1381.126580] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1381.126795] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fb56d89-cdf9-434d-ae37-5c36392db127 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.149775] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1381.149775] env[63297]: value = "task-1697356" [ 1381.149775] env[63297]: _type = "Task" [ 1381.149775] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.155454] env[63297]: DEBUG nova.compute.manager [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Received event network-vif-plugged-2d2fd3e6-5139-447f-b482-8492601c35f3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1381.155716] env[63297]: DEBUG oslo_concurrency.lockutils [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] Acquiring lock "765f3232-f3f9-4d9b-92f2-fb6603f2a90a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.155868] env[63297]: DEBUG oslo_concurrency.lockutils [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] Lock "765f3232-f3f9-4d9b-92f2-fb6603f2a90a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.156041] env[63297]: DEBUG oslo_concurrency.lockutils [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] Lock "765f3232-f3f9-4d9b-92f2-fb6603f2a90a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.156261] env[63297]: DEBUG nova.compute.manager [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] No waiting events found dispatching network-vif-plugged-2d2fd3e6-5139-447f-b482-8492601c35f3 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1381.157052] env[63297]: WARNING nova.compute.manager [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Received unexpected event network-vif-plugged-2d2fd3e6-5139-447f-b482-8492601c35f3 for instance with vm_state building and task_state spawning. [ 1381.157052] env[63297]: DEBUG nova.compute.manager [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Received event network-changed-2d2fd3e6-5139-447f-b482-8492601c35f3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1381.157052] env[63297]: DEBUG nova.compute.manager [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Refreshing instance network info cache due to event network-changed-2d2fd3e6-5139-447f-b482-8492601c35f3. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1381.157052] env[63297]: DEBUG oslo_concurrency.lockutils [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] Acquiring lock "refresh_cache-765f3232-f3f9-4d9b-92f2-fb6603f2a90a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.157294] env[63297]: DEBUG oslo_concurrency.lockutils [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] Acquired lock "refresh_cache-765f3232-f3f9-4d9b-92f2-fb6603f2a90a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.157294] env[63297]: DEBUG nova.network.neutron [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Refreshing network info cache for port 2d2fd3e6-5139-447f-b482-8492601c35f3 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1381.165981] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697356, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.247421] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697349, 'name': Rename_Task, 'duration_secs': 0.259207} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.247774] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1381.248049] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a86134fe-caee-412f-88cf-6d867f031185 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.259036] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1381.259036] env[63297]: value = "task-1697357" [ 1381.259036] env[63297]: _type = "Task" [ 1381.259036] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.268181] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697357, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.294751] env[63297]: DEBUG oslo_vmware.api [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697350, 'name': PowerOffVM_Task, 'duration_secs': 0.264712} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.295417] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1381.295782] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1381.296351] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f915cd28-87ba-428e-8fa3-5fa5db90be7a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.320697] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697352, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125559} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.322463] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1381.322604] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec50ce4-a39e-4a76-a357-64ad85f19375 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.350385] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 6ce88b93-aa42-4f34-81fa-6c09c23ace81/6ce88b93-aa42-4f34-81fa-6c09c23ace81.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1381.351177] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4067c64b-57be-40e5-a5d9-b709db09893c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.378955] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.381300] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1381.381516] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1381.381686] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Deleting the datastore file [datastore1] b5d34058-fa3e-4806-97e5-638bbbffaeb8 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1381.382014] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1381.382014] env[63297]: value = "task-1697359" [ 1381.382014] env[63297]: _type = "Task" [ 1381.382014] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.382273] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30d91a4a-f182-4e59-b1e1-5c1277725da7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.384881] env[63297]: ERROR nova.scheduler.client.report [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [req-1c4f7646-00c8-4e95-81f3-0324ffabb71b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1c4f7646-00c8-4e95-81f3-0324ffabb71b"}]} [ 1381.398288] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697359, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.400056] env[63297]: DEBUG oslo_vmware.api [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1381.400056] env[63297]: value = "task-1697360" [ 1381.400056] env[63297]: _type = "Task" [ 1381.400056] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.406709] env[63297]: DEBUG oslo_vmware.api [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322628} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.407379] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1381.410317] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1381.410317] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1381.410317] env[63297]: INFO nova.compute.manager [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1381.410317] env[63297]: DEBUG oslo.service.loopingcall [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1381.410317] env[63297]: DEBUG nova.compute.manager [-] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1381.410317] env[63297]: DEBUG nova.network.neutron [-] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1381.413085] env[63297]: DEBUG oslo_vmware.api [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697360, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.428949] env[63297]: DEBUG nova.scheduler.client.report [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1381.454511] env[63297]: DEBUG nova.scheduler.client.report [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1381.454511] env[63297]: DEBUG nova.compute.provider_tree [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1381.476101] env[63297]: DEBUG nova.scheduler.client.report [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1381.503211] env[63297]: DEBUG nova.scheduler.client.report [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1381.665166] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697356, 'name': CreateVM_Task, 'duration_secs': 0.448264} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.671471] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1381.672980] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.673290] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.673651] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1381.673992] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6fb7038-7f94-4a0c-8cb3-b52b77f754d2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.681258] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1381.681258] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cd9c4d-002b-e106-df4f-9ef2b550b941" [ 1381.681258] env[63297]: _type = "Task" [ 1381.681258] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.693781] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cd9c4d-002b-e106-df4f-9ef2b550b941, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.772163] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697357, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.856311] env[63297]: DEBUG nova.network.neutron [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Successfully updated port: 2b5c1a4a-c8b7-459a-b120-b193ed9337d5 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1381.887189] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.899028] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697359, 'name': ReconfigVM_Task, 'duration_secs': 0.329672} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.901623] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 6ce88b93-aa42-4f34-81fa-6c09c23ace81/6ce88b93-aa42-4f34-81fa-6c09c23ace81.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1381.903041] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a217407f-3958-4201-9bf5-fa0d3c287952 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.914145] env[63297]: DEBUG oslo_vmware.api [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697360, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177195} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.917980] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1381.918204] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1381.918385] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1381.918556] env[63297]: INFO nova.compute.manager [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1381.919181] env[63297]: DEBUG oslo.service.loopingcall [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1381.919181] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1381.919181] env[63297]: value = "task-1697361" [ 1381.919181] env[63297]: _type = "Task" [ 1381.919181] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.919436] env[63297]: DEBUG nova.compute.manager [-] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1381.919542] env[63297]: DEBUG nova.network.neutron [-] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1381.931099] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697361, 'name': Rename_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.119244] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c476641d-e34d-4ac6-abb4-0a98f6bdbe05 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.127308] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568c148d-6aea-480c-b51a-84aaa209deff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.163043] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bfde5e-5377-408f-b8fa-a586411c7d7c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.171789] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a58bd0d-287b-4f41-bc86-4076ed7fd509 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.192956] env[63297]: DEBUG nova.compute.provider_tree [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1382.207592] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cd9c4d-002b-e106-df4f-9ef2b550b941, 'name': SearchDatastore_Task, 'duration_secs': 0.011994} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.210965] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.210965] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1382.210965] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.210965] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.210965] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1382.210965] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1abcb7ec-0b75-4423-ae18-c3b5a1550423 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.224787] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "b261c90f-642d-42b7-8b79-d87eeaf0537a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.225147] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "b261c90f-642d-42b7-8b79-d87eeaf0537a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.225425] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Created directory with path 
[datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1382.225980] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1382.227836] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc97d4c1-936e-42a2-801f-a137af81b58e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.235590] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1382.235590] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52613179-a039-cfac-ae31-57d60bfdf46e" [ 1382.235590] env[63297]: _type = "Task" [ 1382.235590] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.246182] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52613179-a039-cfac-ae31-57d60bfdf46e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.270731] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697357, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.357138] env[63297]: DEBUG nova.network.neutron [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Updated VIF entry in instance network info cache for port 2d2fd3e6-5139-447f-b482-8492601c35f3. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1382.357138] env[63297]: DEBUG nova.network.neutron [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Updating instance_info_cache with network_info: [{"id": "2d2fd3e6-5139-447f-b482-8492601c35f3", "address": "fa:16:3e:51:68:d4", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d2fd3e6-51", "ovs_interfaceid": "2d2fd3e6-5139-447f-b482-8492601c35f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.358858] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "refresh_cache-63785911-ea55-4aeb-9ba2-6cea5ddd9cae" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.358969] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "refresh_cache-63785911-ea55-4aeb-9ba2-6cea5ddd9cae" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.359825] env[63297]: DEBUG nova.network.neutron [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1382.383574] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.433517] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697361, 'name': Rename_Task, 'duration_secs': 0.153289} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.433861] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1382.434016] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ffb7b1a1-829b-4924-ab47-da45ef700c2d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.444142] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1382.444142] env[63297]: value = "task-1697363" [ 1382.444142] env[63297]: _type = "Task" [ 1382.444142] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.456324] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697363, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.528497] env[63297]: DEBUG nova.network.neutron [-] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.627718] env[63297]: DEBUG nova.compute.manager [req-02ab1700-7082-4a9d-bbda-99a4c130fa49 req-50be5ff3-7da0-4b80-8efd-928e5a1cfade service nova] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Received event network-vif-deleted-fffa061f-0bb8-4fc9-b4a1-22e520b4f9d4 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1382.718918] env[63297]: DEBUG nova.network.neutron [-] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.726767] env[63297]: ERROR nova.scheduler.client.report [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [req-59c9badc-f6f9-4b5b-98c9-45caeee123ce] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-59c9badc-f6f9-4b5b-98c9-45caeee123ce"}]} [ 1382.744855] env[63297]: DEBUG nova.scheduler.client.report [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1382.752535] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52613179-a039-cfac-ae31-57d60bfdf46e, 'name': SearchDatastore_Task, 'duration_secs': 0.037553} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.756369] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d33733b0-ff83-44a9-9f11-e5de41b68e9d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.771622] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697357, 'name': PowerOnVM_Task} progress is 81%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.773602] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1382.773602] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52888f32-f132-77ff-9cd6-8787a99d8274" [ 1382.773602] env[63297]: _type = "Task" [ 1382.773602] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.778819] env[63297]: DEBUG nova.scheduler.client.report [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1382.779043] env[63297]: DEBUG nova.compute.provider_tree [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1382.789573] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52888f32-f132-77ff-9cd6-8787a99d8274, 'name': SearchDatastore_Task, 'duration_secs': 0.010978} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.790790] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.790790] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 765f3232-f3f9-4d9b-92f2-fb6603f2a90a/765f3232-f3f9-4d9b-92f2-fb6603f2a90a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1382.790790] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc000fb5-6226-4f33-8235-3d02db0c181a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.795652] env[63297]: DEBUG nova.scheduler.client.report [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1382.804285] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1382.804285] env[63297]: value = "task-1697364" [ 1382.804285] env[63297]: _type = "Task" [ 1382.804285] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.811894] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697364, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.835817] env[63297]: DEBUG nova.scheduler.client.report [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1382.861549] env[63297]: DEBUG oslo_concurrency.lockutils [req-fb6d2586-fb62-4e74-8456-bf072f98de6c req-ae74e521-9855-4e34-bc78-6e4fd2751f55 service nova] Releasing lock "refresh_cache-765f3232-f3f9-4d9b-92f2-fb6603f2a90a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.882888] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.927662] env[63297]: DEBUG nova.network.neutron [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1382.959953] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697363, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.035562] env[63297]: INFO nova.compute.manager [-] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Took 1.63 seconds to deallocate network for instance. 
[ 1383.196148] env[63297]: DEBUG nova.network.neutron [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Updating instance_info_cache with network_info: [{"id": "2b5c1a4a-c8b7-459a-b120-b193ed9337d5", "address": "fa:16:3e:74:e7:bc", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b5c1a4a-c8", "ovs_interfaceid": "2b5c1a4a-c8b7-459a-b120-b193ed9337d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.226877] env[63297]: INFO nova.compute.manager [-] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Took 2.25 seconds to deallocate network for instance. [ 1383.229057] env[63297]: DEBUG nova.compute.manager [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Received event network-vif-plugged-2b5c1a4a-c8b7-459a-b120-b193ed9337d5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.229720] env[63297]: DEBUG oslo_concurrency.lockutils [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] Acquiring lock "63785911-ea55-4aeb-9ba2-6cea5ddd9cae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.229986] env[63297]: DEBUG oslo_concurrency.lockutils [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] Lock "63785911-ea55-4aeb-9ba2-6cea5ddd9cae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.230212] env[63297]: DEBUG oslo_concurrency.lockutils [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] Lock "63785911-ea55-4aeb-9ba2-6cea5ddd9cae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.230422] env[63297]: DEBUG nova.compute.manager [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] No waiting events found dispatching 
network-vif-plugged-2b5c1a4a-c8b7-459a-b120-b193ed9337d5 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1383.230645] env[63297]: WARNING nova.compute.manager [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Received unexpected event network-vif-plugged-2b5c1a4a-c8b7-459a-b120-b193ed9337d5 for instance with vm_state building and task_state spawning. [ 1383.230820] env[63297]: DEBUG nova.compute.manager [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Received event network-changed-2b5c1a4a-c8b7-459a-b120-b193ed9337d5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.230994] env[63297]: DEBUG nova.compute.manager [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Refreshing instance network info cache due to event network-changed-2b5c1a4a-c8b7-459a-b120-b193ed9337d5. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1383.231177] env[63297]: DEBUG oslo_concurrency.lockutils [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] Acquiring lock "refresh_cache-63785911-ea55-4aeb-9ba2-6cea5ddd9cae" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.282047] env[63297]: DEBUG oslo_vmware.api [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697357, 'name': PowerOnVM_Task, 'duration_secs': 1.765582} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.282755] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1383.282981] env[63297]: INFO nova.compute.manager [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Took 10.39 seconds to spawn the instance on the hypervisor. [ 1383.283197] env[63297]: DEBUG nova.compute.manager [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1383.284250] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ef3a35-e6f9-4faf-8a19-107cf0ba4ab9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.312865] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697364, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497442} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.315959] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 765f3232-f3f9-4d9b-92f2-fb6603f2a90a/765f3232-f3f9-4d9b-92f2-fb6603f2a90a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1383.316271] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1383.316815] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a28ed4a-ca72-46c9-acf0-7205ffcd024f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.324902] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1383.324902] env[63297]: value = "task-1697365" [ 1383.324902] env[63297]: _type = "Task" [ 1383.324902] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.335274] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697365, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.381703] env[63297]: DEBUG oslo_vmware.api [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697333, 'name': ReconfigVM_Task, 'duration_secs': 5.866703} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.381958] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.382199] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Reconfigured VM to detach interface {{(pid=63297) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1383.460909] env[63297]: DEBUG oslo_vmware.api [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697363, 'name': PowerOnVM_Task, 'duration_secs': 0.879096} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.460909] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1383.460909] env[63297]: INFO nova.compute.manager [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Took 7.92 seconds to spawn the instance on the hypervisor. [ 1383.460909] env[63297]: DEBUG nova.compute.manager [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1383.460909] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0853c8-051f-449b-aabe-4bd039355955 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.514542] env[63297]: DEBUG nova.network.neutron [-] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.537439] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b14147-e64f-48d7-89d7-bd73116e7d32 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.545780] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3c3134-f1a1-4342-8fdb-d227297260fb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.550045] env[63297]: DEBUG oslo_concurrency.lockutils [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.579441] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99da1d4-d998-4d86-8b77-febe18afdc2d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.589544] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d9b5fa-a413-4196-aec1-dc05ae197335 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.605529] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Acquiring lock "4e6b1296-9e19-4047-9c38-dc94c686d0cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.605792] 
env[63297]: DEBUG oslo_concurrency.lockutils [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Lock "4e6b1296-9e19-4047-9c38-dc94c686d0cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.605994] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Acquiring lock "4e6b1296-9e19-4047-9c38-dc94c686d0cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.606393] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Lock "4e6b1296-9e19-4047-9c38-dc94c686d0cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.606568] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Lock "4e6b1296-9e19-4047-9c38-dc94c686d0cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.608309] env[63297]: DEBUG nova.compute.provider_tree [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1383.613032] env[63297]: INFO nova.compute.manager [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Terminating instance [ 1383.614274] env[63297]: DEBUG nova.compute.manager [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1383.616612] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1383.616612] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f738d9da-a873-4cf8-abf5-57f49621cc0e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.627076] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Waiting for the task: (returnval){ [ 1383.627076] env[63297]: value = "task-1697366" [ 1383.627076] env[63297]: _type = "Task" [ 1383.627076] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.640397] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697366, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.703465] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "refresh_cache-63785911-ea55-4aeb-9ba2-6cea5ddd9cae" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.703832] env[63297]: DEBUG nova.compute.manager [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Instance network_info: |[{"id": "2b5c1a4a-c8b7-459a-b120-b193ed9337d5", "address": "fa:16:3e:74:e7:bc", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b5c1a4a-c8", "ovs_interfaceid": "2b5c1a4a-c8b7-459a-b120-b193ed9337d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1383.704439] env[63297]: DEBUG oslo_concurrency.lockutils 
[req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] Acquired lock "refresh_cache-63785911-ea55-4aeb-9ba2-6cea5ddd9cae" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.704439] env[63297]: DEBUG nova.network.neutron [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Refreshing network info cache for port 2b5c1a4a-c8b7-459a-b120-b193ed9337d5 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.705577] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:e7:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee4b2432-c393-4e50-ae0e-b5e12bad37db', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2b5c1a4a-c8b7-459a-b120-b193ed9337d5', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1383.713508] env[63297]: DEBUG oslo.service.loopingcall [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1383.714029] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1383.714232] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a6b8001-d2ac-420f-b6c2-adbc77c4b3d8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.738626] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1383.738626] env[63297]: value = "task-1697367" [ 1383.738626] env[63297]: _type = "Task" [ 1383.738626] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.739857] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.750756] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697367, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.806702] env[63297]: INFO nova.compute.manager [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Took 46.39 seconds to build instance. 
[ 1383.836892] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697365, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109995} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.836892] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1383.839039] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8bd69c-7573-4bbc-8d28-cc1a10ba7ec7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.865167] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 765f3232-f3f9-4d9b-92f2-fb6603f2a90a/765f3232-f3f9-4d9b-92f2-fb6603f2a90a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1383.865830] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf12fe2c-ec53-415e-ae58-feba3a662c4e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.890282] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1383.890282] env[63297]: value = "task-1697368" [ 1383.890282] env[63297]: _type = "Task" [ 1383.890282] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.901226] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697368, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.987027] env[63297]: INFO nova.compute.manager [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Took 46.02 seconds to build instance. [ 1384.019497] env[63297]: INFO nova.compute.manager [-] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Took 2.10 seconds to deallocate network for instance. [ 1384.139365] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697366, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.157259] env[63297]: DEBUG nova.scheduler.client.report [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 75 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1384.157546] env[63297]: DEBUG nova.compute.provider_tree [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 75 to 76 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1384.157724] env[63297]: DEBUG nova.compute.provider_tree [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1384.256248] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697367, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.269789] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Volume attach. 
Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1384.270037] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353871', 'volume_id': 'b0c8969b-afca-4185-99f2-ddbfd23d0d4e', 'name': 'volume-b0c8969b-afca-4185-99f2-ddbfd23d0d4e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b853b581-ea46-4455-8cdb-6ea2f31c22be', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0c8969b-afca-4185-99f2-ddbfd23d0d4e', 'serial': 'b0c8969b-afca-4185-99f2-ddbfd23d0d4e'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1384.271287] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750dcf52-fb5f-411f-825c-dd57a4bcc133 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.294152] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84d875d-6ca6-4ff2-b5ac-85f196b3fe21 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.319588] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3b033177-8841-4fe4-84d1-d7650d99f2d6 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "192c3a5d-3a23-4f78-8dc7-a256b6d9381d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.491s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.327430] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] volume-b0c8969b-afca-4185-99f2-ddbfd23d0d4e/volume-b0c8969b-afca-4185-99f2-ddbfd23d0d4e.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1384.331608] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aae0d724-5bc7-446c-8828-dd7782564200 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.352598] env[63297]: DEBUG oslo_vmware.api [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1384.352598] env[63297]: value = "task-1697369" [ 1384.352598] env[63297]: _type = "Task" [ 1384.352598] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.366932] env[63297]: DEBUG oslo_vmware.api [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697369, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.402154] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697368, 'name': ReconfigVM_Task, 'duration_secs': 0.435015} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.402503] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 765f3232-f3f9-4d9b-92f2-fb6603f2a90a/765f3232-f3f9-4d9b-92f2-fb6603f2a90a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1384.403085] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb123863-7f0b-4f90-bf2f-6e42694458c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.412644] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1384.412644] env[63297]: value = "task-1697370" [ 1384.412644] env[63297]: _type = "Task" [ 1384.412644] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.424568] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697370, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.490174] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42b36a95-4cce-4ae9-94a1-4a70d1972f1a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.300s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.526166] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.566881] env[63297]: DEBUG nova.network.neutron [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Updated VIF entry in instance network info cache for port 2b5c1a4a-c8b7-459a-b120-b193ed9337d5. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1384.567268] env[63297]: DEBUG nova.network.neutron [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Updating instance_info_cache with network_info: [{"id": "2b5c1a4a-c8b7-459a-b120-b193ed9337d5", "address": "fa:16:3e:74:e7:bc", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b5c1a4a-c8", "ovs_interfaceid": "2b5c1a4a-c8b7-459a-b120-b193ed9337d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.639999] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697366, 'name': PowerOffVM_Task, 'duration_secs': 0.515858} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.643085] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1384.643085] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Volume detach. 
Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1384.643085] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353799', 'volume_id': '1cf3188d-0b66-4933-9595-057e902e5d2b', 'name': 'volume-1cf3188d-0b66-4933-9595-057e902e5d2b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4e6b1296-9e19-4047-9c38-dc94c686d0cb', 'attached_at': '', 'detached_at': '', 'volume_id': '1cf3188d-0b66-4933-9595-057e902e5d2b', 'serial': '1cf3188d-0b66-4933-9595-057e902e5d2b'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1384.643085] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8441255-7d54-4317-9b00-8928c640916b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.661572] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feddce89-2901-4b19-b6ed-68a10a10a1b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.664803] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.693s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.665488] env[63297]: DEBUG nova.compute.manager [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1384.667999] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.211s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.668580] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.670256] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.472s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.671675] env[63297]: INFO nova.compute.claims [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1384.679947] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87afecf-ff28-4981-96a5-6b8bc2a3ddfd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.704165] env[63297]: INFO nova.scheduler.client.report [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Deleted allocations for instance 754e64ec-b6fa-49d8-9de6-ef38918378fd [ 1384.705029] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7f8b33-b509-4749-9c8d-693e924d80ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.731253] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] The volume has not been displaced from its original location: [datastore1] volume-1cf3188d-0b66-4933-9595-057e902e5d2b/volume-1cf3188d-0b66-4933-9595-057e902e5d2b.vmdk. No consolidation needed. 
{{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1384.737253] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Reconfiguring VM instance instance-0000002c to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1384.739547] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.739695] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.739857] env[63297]: DEBUG nova.network.neutron [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1384.740938] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0389f304-6f7b-4a36-bd77-b1e75633e894 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.754937] env[63297]: DEBUG nova.compute.manager [req-21b95461-a666-4313-a465-355a1f975077 req-d7f791ec-1eb4-44a8-8375-ce2518ca362c service nova] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Received event network-vif-deleted-ba02b0f2-d414-4714-b54a-10f89df1af3a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1384.766137] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697367, 'name': CreateVM_Task, 'duration_secs': 1.014568} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.767352] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1384.767914] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Waiting for the task: (returnval){ [ 1384.767914] env[63297]: value = "task-1697371" [ 1384.767914] env[63297]: _type = "Task" [ 1384.767914] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.768529] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.768678] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.768981] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1384.769285] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-871772a5-ae46-4728-bad8-98ecb313cbae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.778144] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1384.778144] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dae440-1a1b-47b0-79ae-1726d0d171f4" [ 1384.778144] env[63297]: _type = "Task" [ 1384.778144] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.784684] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697371, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.790528] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dae440-1a1b-47b0-79ae-1726d0d171f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.845476] env[63297]: DEBUG nova.compute.manager [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1384.863594] env[63297]: DEBUG oslo_vmware.api [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697369, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.923661] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697370, 'name': Rename_Task, 'duration_secs': 0.217061} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.923944] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1384.924644] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6dcc48e-cf77-4d4b-8ad2-55401ce06500 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.934718] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "192c3a5d-3a23-4f78-8dc7-a256b6d9381d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.934933] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "192c3a5d-3a23-4f78-8dc7-a256b6d9381d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.935157] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "192c3a5d-3a23-4f78-8dc7-a256b6d9381d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.935375] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "192c3a5d-3a23-4f78-8dc7-a256b6d9381d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.935522] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "192c3a5d-3a23-4f78-8dc7-a256b6d9381d-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.937252] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1384.937252] env[63297]: value = "task-1697372" [ 1384.937252] env[63297]: _type = "Task" [ 1384.937252] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.937872] env[63297]: INFO nova.compute.manager [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Terminating instance [ 1384.942598] env[63297]: DEBUG nova.compute.manager [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1384.942795] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1384.943593] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5c3e8b-51f4-4ad8-86be-e820d88c3fe6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.960076] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697372, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.962884] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1384.962884] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11f39e9c-20a2-4bd5-9744-2fdfc70303b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.970623] env[63297]: DEBUG oslo_vmware.api [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1384.970623] env[63297]: value = "task-1697373" [ 1384.970623] env[63297]: _type = "Task" [ 1384.970623] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.980738] env[63297]: DEBUG oslo_vmware.api [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697373, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.985085] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.985310] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.994041] env[63297]: DEBUG nova.compute.manager [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1385.069947] env[63297]: DEBUG oslo_concurrency.lockutils [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] Releasing lock "refresh_cache-63785911-ea55-4aeb-9ba2-6cea5ddd9cae" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.070286] env[63297]: DEBUG nova.compute.manager [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Received event network-vif-deleted-e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1385.070522] env[63297]: INFO nova.compute.manager [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Neutron deleted interface e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c; detaching it from the instance and deleting it from the info cache [ 1385.070754] env[63297]: DEBUG nova.network.neutron [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.176960] env[63297]: DEBUG nova.compute.utils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1385.180791] env[63297]: DEBUG nova.compute.manager [None 
req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Not allocating networking since 'none' was specified. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1385.218092] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7db1e58f-a7b6-4b4f-bb7d-55565d64d386 tempest-ServersTestFqdnHostnames-1030814516 tempest-ServersTestFqdnHostnames-1030814516-project-member] Lock "754e64ec-b6fa-49d8-9de6-ef38918378fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.958s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.283216] env[63297]: DEBUG nova.compute.manager [req-3f92e960-7e22-4bc8-b75b-8ad3e9d75c5c req-35804d2c-49dc-4635-a186-2b9d50e7af49 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Received event network-vif-deleted-1c481d81-78fe-48f3-9eb8-9180cb78ecdf {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1385.283473] env[63297]: INFO nova.compute.manager [req-3f92e960-7e22-4bc8-b75b-8ad3e9d75c5c req-35804d2c-49dc-4635-a186-2b9d50e7af49 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Neutron deleted interface 1c481d81-78fe-48f3-9eb8-9180cb78ecdf; detaching it from the instance and deleting it from the info cache [ 1385.284049] env[63297]: DEBUG nova.network.neutron [req-3f92e960-7e22-4bc8-b75b-8ad3e9d75c5c req-35804d2c-49dc-4635-a186-2b9d50e7af49 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updating instance_info_cache with network_info: [{"id": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "address": "fa:16:3e:d1:15:40", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14298cd9-89", "ovs_interfaceid": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.286044] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697371, 'name': ReconfigVM_Task, 'duration_secs': 0.209822} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.290051] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Reconfigured VM instance instance-0000002c to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1385.298322] env[63297]: DEBUG oslo_concurrency.lockutils [None req-80a059ed-493c-457e-a450-8431efc7ccff tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.298557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-80a059ed-493c-457e-a450-8431efc7ccff tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.298733] env[63297]: DEBUG nova.compute.manager [None req-80a059ed-493c-457e-a450-8431efc7ccff tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1385.298998] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17c0376b-68a4-4fec-948d-12db755c5887 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.311145] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5cee343-193d-452b-ac53-d6a0dcf86798 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.322077] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dae440-1a1b-47b0-79ae-1726d0d171f4, 'name': SearchDatastore_Task, 'duration_secs': 0.013611} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.322077] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.322077] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1385.322077] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.322077] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.322077] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1385.322077] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a282119e-4d82-4d6f-b645-5ec6fb9b851c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.327613] env[63297]: DEBUG nova.compute.manager [None req-80a059ed-493c-457e-a450-8431efc7ccff tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63297) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1385.328191] env[63297]: DEBUG nova.objects.instance [None req-80a059ed-493c-457e-a450-8431efc7ccff tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lazy-loading 'flavor' on Instance uuid 6ce88b93-aa42-4f34-81fa-6c09c23ace81 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1385.329809] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Waiting for the task: (returnval){ [ 1385.329809] env[63297]: value = "task-1697374" [ 1385.329809] env[63297]: _type = "Task" [ 1385.329809] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.340260] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697374, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.341751] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1385.341939] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1385.342686] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98e02e84-a172-4ac1-a3d3-855fc07f54ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.351483] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1385.351483] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a1887a-d181-2d87-be58-47d9f448bb61" [ 1385.351483] env[63297]: _type = "Task" [ 1385.351483] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.372742] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a1887a-d181-2d87-be58-47d9f448bb61, 'name': SearchDatastore_Task, 'duration_secs': 0.013851} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.377086] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.378009] env[63297]: DEBUG oslo_vmware.api [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697369, 'name': ReconfigVM_Task, 'duration_secs': 0.603984} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.378257] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65ea01ce-e9f1-4126-bb44-329b494c113e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.381073] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Reconfigured VM instance instance-00000021 to attach disk [datastore1] volume-b0c8969b-afca-4185-99f2-ddbfd23d0d4e/volume-b0c8969b-afca-4185-99f2-ddbfd23d0d4e.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1385.387954] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24ada709-08c8-4e20-9ab4-9362dcff60cd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.403614] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1385.403614] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ab4a34-45c3-191d-8411-9bf5a4e4647b" [ 1385.403614] env[63297]: _type = "Task" [ 1385.403614] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.409540] env[63297]: DEBUG oslo_vmware.api [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1385.409540] env[63297]: value = "task-1697375" [ 1385.409540] env[63297]: _type = "Task" [ 1385.409540] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.418797] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ab4a34-45c3-191d-8411-9bf5a4e4647b, 'name': SearchDatastore_Task, 'duration_secs': 0.01122} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.426025] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.426025] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 63785911-ea55-4aeb-9ba2-6cea5ddd9cae/63785911-ea55-4aeb-9ba2-6cea5ddd9cae.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1385.426025] env[63297]: DEBUG oslo_vmware.api [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697375, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.426025] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-250535f1-a089-4cd0-89ef-01ffd0008622 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.431716] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1385.431716] env[63297]: value = "task-1697376" [ 1385.431716] env[63297]: _type = "Task" [ 1385.431716] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.443786] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697376, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.450938] env[63297]: DEBUG oslo_vmware.api [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697372, 'name': PowerOnVM_Task, 'duration_secs': 0.484589} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.451478] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1385.451478] env[63297]: INFO nova.compute.manager [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Took 7.19 seconds to spawn the instance on the hypervisor. [ 1385.451627] env[63297]: DEBUG nova.compute.manager [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1385.452430] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da8c038-b29a-4d8f-9f22-a5693e3f0399 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.481043] env[63297]: DEBUG oslo_vmware.api [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697373, 'name': PowerOffVM_Task, 'duration_secs': 0.320384} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.481532] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1385.481635] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1385.481885] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44205c80-c1b7-4caf-b5fd-e4848b4a3834 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.495669] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "ef851d71-788d-42f8-a824-5d30a89e957b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.495669] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "ef851d71-788d-42f8-a824-5d30a89e957b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.495669] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "ef851d71-788d-42f8-a824-5d30a89e957b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.495669] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "ef851d71-788d-42f8-a824-5d30a89e957b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.495669] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "ef851d71-788d-42f8-a824-5d30a89e957b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.500143] env[63297]: INFO nova.compute.manager [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Terminating instance [ 1385.503036] env[63297]: DEBUG nova.compute.manager [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1385.503262] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1385.504317] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d37616-0479-42bf-a0f7-d1b4a64245c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.513613] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1385.513800] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53666893-8e69-4c53-b624-ac95f52c6d68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.518453] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.522882] env[63297]: DEBUG oslo_vmware.api [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1385.522882] env[63297]: value = "task-1697378" [ 1385.522882] env[63297]: _type = "Task" [ 1385.522882] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.532751] env[63297]: DEBUG oslo_vmware.api [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697378, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.574593] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1385.575328] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1385.575542] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Deleting the datastore file [datastore1] 192c3a5d-3a23-4f78-8dc7-a256b6d9381d {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1385.578848] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dfd19a3b-872f-4dc8-a88f-d41ad42138ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.581038] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e236505-5cc5-4769-b930-f6f428d790d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.591316] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2717742-c9a8-4f62-bc77-77eb8cefb30c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.603249] env[63297]: DEBUG oslo_vmware.api [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1385.603249] env[63297]: value = "task-1697379" [ 1385.603249] env[63297]: _type = "Task" [ 1385.603249] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.612249] env[63297]: DEBUG oslo_vmware.api [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697379, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.633407] env[63297]: DEBUG nova.compute.manager [req-81c06e97-e92a-4115-9e07-123788812b27 req-3114384f-e7ab-4618-82bf-b7dfc7441a20 service nova] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Detach interface failed, port_id=e1fefdf1-1cd1-42b3-a004-4df6ea41fc3c, reason: Instance b5d34058-fa3e-4806-97e5-638bbbffaeb8 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1385.685022] env[63297]: DEBUG nova.compute.manager [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1385.786292] env[63297]: DEBUG oslo_concurrency.lockutils [req-3f92e960-7e22-4bc8-b75b-8ad3e9d75c5c req-35804d2c-49dc-4635-a186-2b9d50e7af49 service nova] Acquiring lock "ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.822663] env[63297]: INFO nova.network.neutron [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Port 1c481d81-78fe-48f3-9eb8-9180cb78ecdf from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1385.823084] env[63297]: DEBUG nova.network.neutron [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updating instance_info_cache with network_info: [{"id": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "address": "fa:16:3e:d1:15:40", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14298cd9-89", "ovs_interfaceid": "14298cd9-8999-4142-9f1f-7a512e4a09d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.842365] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a059ed-493c-457e-a450-8431efc7ccff tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1385.843459] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2277cb99-a59b-493e-a6be-0d8460ce037b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.849508] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 
tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697374, 'name': ReconfigVM_Task, 'duration_secs': 0.176729} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.850198] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353799', 'volume_id': '1cf3188d-0b66-4933-9595-057e902e5d2b', 'name': 'volume-1cf3188d-0b66-4933-9595-057e902e5d2b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4e6b1296-9e19-4047-9c38-dc94c686d0cb', 'attached_at': '', 'detached_at': '', 'volume_id': '1cf3188d-0b66-4933-9595-057e902e5d2b', 'serial': '1cf3188d-0b66-4933-9595-057e902e5d2b'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1385.850535] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1385.851443] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db999efe-8f8b-4711-9e77-00654ab21abd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.859731] env[63297]: DEBUG oslo_vmware.api [None req-80a059ed-493c-457e-a450-8431efc7ccff tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1385.859731] env[63297]: value = "task-1697380" [ 1385.859731] env[63297]: _type = "Task" [ 1385.859731] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.866023] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1385.866792] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d755f21b-6dce-459f-a506-c19e75d92dd4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.873091] env[63297]: DEBUG oslo_vmware.api [None req-80a059ed-493c-457e-a450-8431efc7ccff tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697380, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.923308] env[63297]: DEBUG oslo_vmware.api [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697375, 'name': ReconfigVM_Task, 'duration_secs': 0.167967} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.923670] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353871', 'volume_id': 'b0c8969b-afca-4185-99f2-ddbfd23d0d4e', 'name': 'volume-b0c8969b-afca-4185-99f2-ddbfd23d0d4e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b853b581-ea46-4455-8cdb-6ea2f31c22be', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0c8969b-afca-4185-99f2-ddbfd23d0d4e', 'serial': 'b0c8969b-afca-4185-99f2-ddbfd23d0d4e'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1385.946279] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697376, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.976839] env[63297]: INFO nova.compute.manager [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Took 43.45 seconds to build instance. [ 1385.990760] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1385.991134] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1385.991229] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Deleting the datastore file [datastore1] 4e6b1296-9e19-4047-9c38-dc94c686d0cb {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1385.991495] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-643a66a5-b4d3-448f-80ba-ccce26cc0a2a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.001434] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Waiting for the task: (returnval){ [ 1386.001434] env[63297]: value = "task-1697382" [ 1386.001434] env[63297]: _type = "Task" [ 1386.001434] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.015061] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697382, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.038573] env[63297]: DEBUG oslo_vmware.api [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697378, 'name': PowerOffVM_Task, 'duration_secs': 0.223646} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.039027] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1386.039308] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1386.039648] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57a14a3a-8ba4-4f77-8623-12e5e9b964ed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.119758] env[63297]: DEBUG oslo_vmware.api [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697379, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.496279} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.119870] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1386.120126] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1386.120387] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1386.120622] env[63297]: INFO nova.compute.manager [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1386.120968] env[63297]: DEBUG oslo.service.loopingcall [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1386.121375] env[63297]: DEBUG nova.compute.manager [-] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1386.121450] env[63297]: DEBUG nova.network.neutron [-] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1386.140289] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1386.140530] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1386.140810] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Deleting the datastore file [datastore1] ef851d71-788d-42f8-a824-5d30a89e957b {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1386.141138] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34b5a9d4-2832-47c5-a3df-e84fc018eb0d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.154374] env[63297]: DEBUG oslo_vmware.api [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1386.154374] env[63297]: value = "task-1697384" [ 1386.154374] env[63297]: _type = "Task" [ 1386.154374] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.163041] env[63297]: DEBUG oslo_vmware.api [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697384, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.326768] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.349705] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab4e40a-7f12-420b-adf0-e66c76dfda94 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.360618] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a552986-c27a-44c3-9a3a-f446afb804e4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.378939] env[63297]: DEBUG oslo_vmware.api [None req-80a059ed-493c-457e-a450-8431efc7ccff tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697380, 'name': PowerOffVM_Task, 'duration_secs': 0.282167} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.415185] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a059ed-493c-457e-a450-8431efc7ccff tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1386.415578] env[63297]: DEBUG nova.compute.manager [None req-80a059ed-493c-457e-a450-8431efc7ccff tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1386.417198] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d98891d-a66b-416b-bdd4-024ba462b3fd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.420595] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990943e1-f2fb-4906-bd91-328935a8cfec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.439365] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fe04cd-c1e6-413f-a664-92663150811d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.467240] env[63297]: DEBUG nova.compute.provider_tree [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1386.471786] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697376, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565807} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.472643] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 63785911-ea55-4aeb-9ba2-6cea5ddd9cae/63785911-ea55-4aeb-9ba2-6cea5ddd9cae.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1386.472744] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1386.474353] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-10ec9d2c-c962-4aa7-8125-7a798f8af0d1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.478442] env[63297]: DEBUG oslo_concurrency.lockutils [None req-754fc0f0-8f30-44e5-9739-8b149f0b8dbf tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "765f3232-f3f9-4d9b-92f2-fb6603f2a90a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.118s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.483081] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1386.483081] env[63297]: value = "task-1697385" [ 1386.483081] env[63297]: _type = "Task" [ 1386.483081] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.501306] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697385, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.516791] env[63297]: DEBUG oslo_vmware.api [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Task: {'id': task-1697382, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120607} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.519062] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1386.519274] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1386.519451] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1386.519625] env[63297]: INFO nova.compute.manager [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Took 2.91 seconds to destroy the instance on the hypervisor. [ 1386.523549] env[63297]: DEBUG oslo.service.loopingcall [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1386.523549] env[63297]: DEBUG nova.compute.manager [-] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1386.523549] env[63297]: DEBUG nova.network.neutron [-] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1386.666956] env[63297]: DEBUG oslo_vmware.api [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292242} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.667281] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1386.667504] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1386.667687] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1386.667872] env[63297]: INFO nova.compute.manager [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1386.668130] env[63297]: DEBUG oslo.service.loopingcall [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1386.668322] env[63297]: DEBUG nova.compute.manager [-] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1386.668428] env[63297]: DEBUG nova.network.neutron [-] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1386.696626] env[63297]: DEBUG nova.compute.manager [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1386.726608] env[63297]: DEBUG nova.virt.hardware [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1386.726855] env[63297]: DEBUG nova.virt.hardware [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1386.727017] env[63297]: DEBUG nova.virt.hardware [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1386.727214] env[63297]: DEBUG nova.virt.hardware [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1386.727394] env[63297]: DEBUG nova.virt.hardware [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1386.727547] env[63297]: DEBUG nova.virt.hardware [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1386.727757] env[63297]: DEBUG nova.virt.hardware [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1386.727912] env[63297]: DEBUG nova.virt.hardware [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1386.728259] env[63297]: DEBUG nova.virt.hardware [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 
tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1386.728469] env[63297]: DEBUG nova.virt.hardware [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1386.728651] env[63297]: DEBUG nova.virt.hardware [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1386.729510] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c920e7-38c3-4946-aae1-3fc24c34aa40 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.741267] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c9067e-8ba6-4338-a82d-f17124601a8a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.757411] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Instance VIF info [] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1386.764909] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Creating folder: Project (bd433f5f7e2d4ab7a38cedc3b86a106e). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1386.765333] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e03318a-9e79-4f81-a00f-94716ce34955 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.777575] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Created folder: Project (bd433f5f7e2d4ab7a38cedc3b86a106e) in parent group-v353718. [ 1386.777765] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Creating folder: Instances. Parent ref: group-v353876. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1386.778013] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00f32304-a4a8-4ee3-9e04-dd4fb66f4cbd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.789130] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Created folder: Instances in parent group-v353876. 
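The nova.virt.hardware lines above walk the CPU-topology search for this 1-vCPU flavor: neither the flavor nor the image sets any limits, so the maxima default to 65536 per dimension, and the only topology whose sockets x cores x threads equals the vCPU count is 1:1:1, which is why the log reports a single possible topology. Below is a minimal standalone Python sketch of that enumeration, for illustration only; it is not Nova's _get_possible_cpu_topologies and omits preferred-topology ordering, threads policies and NUMA handling.

    from dataclasses import dataclass
    from itertools import product

    # Enumerate topologies whose product equals the vCPU count while
    # respecting per-dimension maximums (simplified sketch, not Nova code).
    @dataclass(frozen=True)
    class VirtCPUTopology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    if __name__ == "__main__":
        topos = possible_topologies(1)
        print(f"Got {len(topos)} possible topologies")  # 1, as in the log above
        print(topos)  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]

For the m1.nano flavor in this run (vcpus=1), the sketch reproduces the logged result of exactly one candidate, 1 socket x 1 core x 1 thread; larger flavors would yield every factorization of the vCPU count that fits within the limits.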
[ 1386.789385] env[63297]: DEBUG oslo.service.loopingcall [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1386.789918] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1386.790150] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02b3e55d-e73a-4357-b85f-b1be80d6eca6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.808700] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1386.808700] env[63297]: value = "task-1697388" [ 1386.808700] env[63297]: _type = "Task" [ 1386.808700] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.817458] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697388, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.832565] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7d70586c-943c-4ce4-b0f9-481842b311a1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-ef851d71-788d-42f8-a824-5d30a89e957b-1c481d81-78fe-48f3-9eb8-9180cb78ecdf" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.073s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.948595] env[63297]: DEBUG oslo_concurrency.lockutils [None req-80a059ed-493c-457e-a450-8431efc7ccff tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.650s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.981327] env[63297]: DEBUG nova.compute.manager [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1386.994274] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697385, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075586} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.994547] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1386.995410] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b65be8-646e-40b1-98ce-918b2d99df71 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.015644] env[63297]: DEBUG nova.objects.instance [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lazy-loading 'flavor' on Instance uuid b853b581-ea46-4455-8cdb-6ea2f31c22be {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1387.026986] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 63785911-ea55-4aeb-9ba2-6cea5ddd9cae/63785911-ea55-4aeb-9ba2-6cea5ddd9cae.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1387.030774] env[63297]: ERROR nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [req-a809fdfd-8bcb-41ed-9032-a6b93013cbe5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a809fdfd-8bcb-41ed-9032-a6b93013cbe5"}]} [ 1387.031143] env[63297]: DEBUG nova.network.neutron [-] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.032487] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9d6b24e-e604-484a-8693-490c40dad6ed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.060330] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1387.060330] env[63297]: value = "task-1697389" [ 1387.060330] env[63297]: _type = "Task" [ 1387.060330] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.069280] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697389, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.073028] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1387.098222] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1387.099363] env[63297]: DEBUG nova.compute.provider_tree [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1387.117965] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1387.146901] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1387.322857] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697388, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.501569] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.539815] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dddda4dd-5fb6-4efc-b31b-85ccc220f4bc tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.937s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.552131] env[63297]: INFO nova.compute.manager [-] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Took 1.43 seconds to deallocate network for instance. [ 1387.575248] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697389, 'name': ReconfigVM_Task, 'duration_secs': 0.432776} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.578059] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 63785911-ea55-4aeb-9ba2-6cea5ddd9cae/63785911-ea55-4aeb-9ba2-6cea5ddd9cae.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1387.579252] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93693eda-2c67-4016-9ae2-32df66d26608 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.589958] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1387.589958] env[63297]: value = "task-1697390" [ 1387.589958] env[63297]: _type = "Task" [ 1387.589958] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.603595] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697390, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.760985] env[63297]: DEBUG nova.compute.manager [req-708eb326-ee5d-4753-b62b-9e3f16a8b413 req-cde5c6a1-c9a3-4465-961e-a0c4661cef60 service nova] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Received event network-vif-deleted-9ed4fb03-694f-424c-a7f8-b3f512a45b77 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1387.769308] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a745b98-c09f-4c2a-afad-a88ca5df3984 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.778306] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3cb8ee-f402-4603-abe2-8fea20c88732 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.810825] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd33062-c5ae-4c0d-9765-426eca0d58ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.825433] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8e59bb-96cf-4334-a6f1-6ded8024925d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.830162] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697388, 'name': CreateVM_Task, 'duration_secs': 0.597988} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.832384] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1387.832881] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.833171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.833416] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1387.833885] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eff92a81-329c-4546-af37-03b0e89bad14 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.846159] env[63297]: DEBUG 
nova.compute.provider_tree [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1387.851680] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1387.851680] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d7616-e48a-efc9-f09d-f9d1f4bb829f" [ 1387.851680] env[63297]: _type = "Task" [ 1387.851680] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.861816] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d7616-e48a-efc9-f09d-f9d1f4bb829f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.046512] env[63297]: DEBUG nova.network.neutron [-] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.065833] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.102131] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697390, 'name': Rename_Task, 'duration_secs': 0.151194} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.102473] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1388.102704] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11e9f9b0-dc3d-43a3-ba13-8813444dd352 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.108873] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1388.108873] env[63297]: value = "task-1697391" [ 1388.108873] env[63297]: _type = "Task" [ 1388.108873] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.121198] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697391, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.335606] env[63297]: DEBUG nova.network.neutron [-] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.364382] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d7616-e48a-efc9-f09d-f9d1f4bb829f, 'name': SearchDatastore_Task, 'duration_secs': 0.009118} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.364382] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.364382] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1388.364532] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.364642] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.365544] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1388.365544] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ec0cf4d-2cab-43a2-9271-fbf3f878da0f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.373119] env[63297]: ERROR nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [req-b346cf62-8a3f-4d7a-9018-a4405317be38] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b346cf62-8a3f-4d7a-9018-a4405317be38"}]} [ 1388.377724] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1388.378460] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1388.378864] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90b9ea29-967c-407a-851d-ad5bf22981dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.385160] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1388.385160] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526e6b6d-f0c0-acc2-966a-97f83796911e" [ 1388.385160] env[63297]: _type = "Task" [ 1388.385160] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.397933] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526e6b6d-f0c0-acc2-966a-97f83796911e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.400569] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1388.422156] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1388.422428] env[63297]: DEBUG nova.compute.provider_tree [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1388.446693] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1388.470923] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1388.549592] env[63297]: INFO nova.compute.manager [-] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Took 2.03 seconds to deallocate network for instance. [ 1388.621938] env[63297]: DEBUG oslo_vmware.api [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697391, 'name': PowerOnVM_Task, 'duration_secs': 0.463247} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.622235] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1388.622435] env[63297]: INFO nova.compute.manager [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Took 7.61 seconds to spawn the instance on the hypervisor. [ 1388.622641] env[63297]: DEBUG nova.compute.manager [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1388.623592] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb30f0c4-978d-4ad4-bf68-32ebc226a802 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.842295] env[63297]: INFO nova.compute.manager [-] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Took 2.17 seconds to deallocate network for instance. [ 1388.907425] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526e6b6d-f0c0-acc2-966a-97f83796911e, 'name': SearchDatastore_Task, 'duration_secs': 0.009892} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.908893] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bb7ab4e-07ee-45e7-bba7-39958a01108c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.917209] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1388.917209] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c82127-3200-53d5-6605-b0f37f7eebb0" [ 1388.917209] env[63297]: _type = "Task" [ 1388.917209] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.931212] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c82127-3200-53d5-6605-b0f37f7eebb0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.935487] env[63297]: DEBUG oslo_concurrency.lockutils [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Acquiring lock "35c68986-51b5-43ba-a076-aca3c86d68bc" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.935703] env[63297]: DEBUG oslo_concurrency.lockutils [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Lock "35c68986-51b5-43ba-a076-aca3c86d68bc" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.013192] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.013491] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.054018] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fec980c-4928-4e80-8051-9356d5d6bed2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.061135] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4babbb13-5477-477e-9975-c2dbd75fffdb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.096697] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8e1588-79ab-4f10-9fe1-69a0284985d2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.104112] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffefcee-c489-478b-aa2d-2e17440df4c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.117400] env[63297]: DEBUG nova.compute.provider_tree [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1389.129710] env[63297]: INFO nova.compute.manager [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Took 0.58 seconds to detach 1 volumes for instance. [ 1389.131745] env[63297]: DEBUG nova.compute.manager [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Deleting volume: 1cf3188d-0b66-4933-9595-057e902e5d2b {{(pid=63297) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1389.144437] env[63297]: INFO nova.compute.manager [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Took 44.25 seconds to build instance. [ 1389.351651] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.427933] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c82127-3200-53d5-6605-b0f37f7eebb0, 'name': SearchDatastore_Task, 'duration_secs': 0.018163} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.428221] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.428478] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] dc196e68-b725-43a1-9848-e84d1b138245/dc196e68-b725-43a1-9848-e84d1b138245.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1389.428887] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f129aa6f-f36e-477f-b834-16662bfaf452 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.436241] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1389.436241] env[63297]: value = "task-1697393" [ 1389.436241] env[63297]: _type = "Task" [ 1389.436241] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.441859] env[63297]: DEBUG nova.compute.utils [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1389.446270] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697393, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.518802] env[63297]: INFO nova.compute.manager [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Detaching volume b0c8969b-afca-4185-99f2-ddbfd23d0d4e [ 1389.523613] env[63297]: DEBUG nova.compute.manager [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1389.525315] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45eb2fc0-1fd9-4b7f-bead-640282e98c68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.559059] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "708d1907-1619-4aa4-b0b3-ae58f046a760" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.559348] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "708d1907-1619-4aa4-b0b3-ae58f046a760" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.576247] env[63297]: INFO nova.virt.block_device [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Attempting to driver detach volume b0c8969b-afca-4185-99f2-ddbfd23d0d4e from mountpoint /dev/sdb [ 1389.576482] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Volume detach. 
Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1389.576669] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353871', 'volume_id': 'b0c8969b-afca-4185-99f2-ddbfd23d0d4e', 'name': 'volume-b0c8969b-afca-4185-99f2-ddbfd23d0d4e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b853b581-ea46-4455-8cdb-6ea2f31c22be', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0c8969b-afca-4185-99f2-ddbfd23d0d4e', 'serial': 'b0c8969b-afca-4185-99f2-ddbfd23d0d4e'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1389.577614] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0828f55d-8b25-4f41-8a1f-25c5b7d2f2b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.604035] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3282f763-eb6a-486e-beb3-947a952ad7a8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.613731] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff038da-bf55-4a47-b6c2-69795d730162 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.639601] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2ae562-8373-4798-8acd-89edfb091f39 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.655663] env[63297]: DEBUG oslo_concurrency.lockutils [None req-12b384e4-1d12-42fb-b97d-e07478f79a13 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "63785911-ea55-4aeb-9ba2-6cea5ddd9cae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.955s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.656328] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] The volume has not been displaced from its original location: [datastore1] volume-b0c8969b-afca-4185-99f2-ddbfd23d0d4e/volume-b0c8969b-afca-4185-99f2-ddbfd23d0d4e.vmdk. No consolidation needed. 
{{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1389.661903] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Reconfiguring VM instance instance-00000021 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1389.662878] env[63297]: ERROR nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [req-2da66dcc-5d46-49fa-bcd0-6c40e93caf52] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2da66dcc-5d46-49fa-bcd0-6c40e93caf52"}]} [ 1389.664934] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03f371fd-027b-4405-bec7-3cc8a050a1a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.769705] env[63297]: DEBUG oslo_vmware.api [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1389.769705] env[63297]: value = "task-1697394" [ 1389.769705] env[63297]: _type = "Task" [ 1389.769705] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.769705] env[63297]: DEBUG oslo_vmware.api [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697394, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.769705] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1389.769705] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.769705] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1389.769705] env[63297]: DEBUG nova.compute.provider_tree [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1389.771357] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1389.796352] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1389.948131] env[63297]: DEBUG oslo_concurrency.lockutils [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Lock 
"35c68986-51b5-43ba-a076-aca3c86d68bc" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.012s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.948910] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697393, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.048579] env[63297]: INFO nova.compute.manager [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] instance snapshotting [ 1390.048801] env[63297]: WARNING nova.compute.manager [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1390.051941] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d213bc82-1b1e-4bb5-81b6-686af05efab4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.075729] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1486c7-69b6-4121-b18d-37d625cfb5ba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.165895] env[63297]: DEBUG nova.compute.manager [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1390.199144] env[63297]: DEBUG oslo_vmware.api [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697394, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.230644] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3e167a78-0806-4cf3-a086-fb766bcce04d tempest-ServersAdminTestJSON-355073583 tempest-ServersAdminTestJSON-355073583-project-admin] Acquiring lock "refresh_cache-63785911-ea55-4aeb-9ba2-6cea5ddd9cae" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.230644] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3e167a78-0806-4cf3-a086-fb766bcce04d tempest-ServersAdminTestJSON-355073583 tempest-ServersAdminTestJSON-355073583-project-admin] Acquired lock "refresh_cache-63785911-ea55-4aeb-9ba2-6cea5ddd9cae" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.230644] env[63297]: DEBUG nova.network.neutron [None req-3e167a78-0806-4cf3-a086-fb766bcce04d tempest-ServersAdminTestJSON-355073583 tempest-ServersAdminTestJSON-355073583-project-admin] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1390.294304] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.294573] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.349272] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51625ae6-6dac-41ec-aab9-07cccc93c81b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.356513] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eadc3f38-04e0-44ae-ae25-01baaf9e4313 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.390375] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951def45-40a6-4553-a5a4-6ae1b48bf9c3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.398084] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aff969b-971f-4a97-8f02-e8d2934eeaf4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.411556] env[63297]: DEBUG nova.compute.provider_tree [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1390.447710] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697393, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630455} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.448273] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] dc196e68-b725-43a1-9848-e84d1b138245/dc196e68-b725-43a1-9848-e84d1b138245.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1390.448273] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1390.448430] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5097d562-dcf9-41a4-b063-4c051cd45dca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.454689] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1390.454689] env[63297]: value = "task-1697395" [ 1390.454689] env[63297]: _type = "Task" [ 1390.454689] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.462462] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697395, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.590257] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1390.590823] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8fc5852a-28af-4fe3-82a4-58631fd573a0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.600498] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1390.600498] env[63297]: value = "task-1697396" [ 1390.600498] env[63297]: _type = "Task" [ 1390.600498] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.611280] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697396, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.698972] env[63297]: DEBUG oslo_vmware.api [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697394, 'name': ReconfigVM_Task, 'duration_secs': 0.651903} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.699902] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.700201] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Reconfigured VM instance instance-00000021 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1390.704991] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c74e7b54-b233-4b75-b495-85cbf90175df {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.722817] env[63297]: DEBUG oslo_vmware.api [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1390.722817] env[63297]: value = "task-1697397" [ 1390.722817] env[63297]: _type = "Task" [ 1390.722817] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.734683] env[63297]: DEBUG oslo_vmware.api [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697397, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.785148] env[63297]: DEBUG nova.compute.manager [req-0fb8a2f4-19b8-4c97-93da-9aa454feef95 req-202e075e-8587-426c-9e76-422d1bdc3922 service nova] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Received event network-vif-deleted-faebe641-fe2b-4fc2-828b-4348d47ab8eb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1390.785362] env[63297]: DEBUG nova.compute.manager [req-0fb8a2f4-19b8-4c97-93da-9aa454feef95 req-202e075e-8587-426c-9e76-422d1bdc3922 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Received event network-vif-deleted-14298cd9-8999-4142-9f1f-7a512e4a09d1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1390.797925] env[63297]: DEBUG nova.compute.utils [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1390.949577] env[63297]: DEBUG nova.scheduler.client.report [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 79 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1390.949853] env[63297]: DEBUG nova.compute.provider_tree [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 79 to 80 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1390.950044] env[63297]: DEBUG nova.compute.provider_tree [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1390.967302] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 
tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697395, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.150205} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.967575] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1390.968601] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d7dc79-e629-4f5a-a6e9-a13102ca436a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.992136] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] dc196e68-b725-43a1-9848-e84d1b138245/dc196e68-b725-43a1-9848-e84d1b138245.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1390.992959] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f13c5e52-6863-4a99-a9e2-1dace0a887ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.017181] env[63297]: DEBUG oslo_concurrency.lockutils [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Acquiring lock "35c68986-51b5-43ba-a076-aca3c86d68bc" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.017446] env[63297]: DEBUG oslo_concurrency.lockutils [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Lock "35c68986-51b5-43ba-a076-aca3c86d68bc" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.017712] env[63297]: INFO nova.compute.manager [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Attaching volume d18374ff-c296-4982-b5bc-1bbfdb0a6fca to /dev/sdb [ 1391.021524] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1391.021524] env[63297]: value = "task-1697398" [ 1391.021524] env[63297]: _type = "Task" [ 1391.021524] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.036945] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697398, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.067143] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc02863e-5495-4a01-85da-9f0e4f69ebae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.074404] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6e899e-c490-40d5-9c1f-d3ec0df5b268 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.080132] env[63297]: DEBUG nova.network.neutron [None req-3e167a78-0806-4cf3-a086-fb766bcce04d tempest-ServersAdminTestJSON-355073583 tempest-ServersAdminTestJSON-355073583-project-admin] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Updating instance_info_cache with network_info: [{"id": "2b5c1a4a-c8b7-459a-b120-b193ed9337d5", "address": "fa:16:3e:74:e7:bc", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b5c1a4a-c8", "ovs_interfaceid": "2b5c1a4a-c8b7-459a-b120-b193ed9337d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1391.093064] env[63297]: DEBUG nova.virt.block_device [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Updating existing volume attachment record: e0855f63-2a52-48bf-bc4c-1c999e41d50d {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1391.110630] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697396, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.232604] env[63297]: DEBUG oslo_vmware.api [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697397, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.303131] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.457990] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.788s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.458593] env[63297]: DEBUG nova.compute.manager [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1391.461386] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.415s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.461606] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.463778] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.246s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.465312] env[63297]: INFO nova.compute.claims [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1391.497890] env[63297]: INFO nova.scheduler.client.report [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] 
Deleted allocations for instance 99cc8af3-5c18-4839-94db-996861e0c276 [ 1391.533164] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697398, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.583022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3e167a78-0806-4cf3-a086-fb766bcce04d tempest-ServersAdminTestJSON-355073583 tempest-ServersAdminTestJSON-355073583-project-admin] Releasing lock "refresh_cache-63785911-ea55-4aeb-9ba2-6cea5ddd9cae" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.583022] env[63297]: DEBUG nova.compute.manager [None req-3e167a78-0806-4cf3-a086-fb766bcce04d tempest-ServersAdminTestJSON-355073583 tempest-ServersAdminTestJSON-355073583-project-admin] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Inject network info {{(pid=63297) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1391.583261] env[63297]: DEBUG nova.compute.manager [None req-3e167a78-0806-4cf3-a086-fb766bcce04d tempest-ServersAdminTestJSON-355073583 tempest-ServersAdminTestJSON-355073583-project-admin] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] network_info to inject: |[{"id": "2b5c1a4a-c8b7-459a-b120-b193ed9337d5", "address": "fa:16:3e:74:e7:bc", "network": {"id": "7ca969c9-8da7-4f0b-87d4-353f8fb43051", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1647389200-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de74a055696b4dd69b88d08b52d327d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b5c1a4a-c8", "ovs_interfaceid": "2b5c1a4a-c8b7-459a-b120-b193ed9337d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1391.589035] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3e167a78-0806-4cf3-a086-fb766bcce04d tempest-ServersAdminTestJSON-355073583 tempest-ServersAdminTestJSON-355073583-project-admin] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Reconfiguring VM instance to set the machine id {{(pid=63297) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1391.589149] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aebfa7c3-3f97-4419-9f93-c6cb862215aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.608107] env[63297]: DEBUG oslo_vmware.api [None req-3e167a78-0806-4cf3-a086-fb766bcce04d tempest-ServersAdminTestJSON-355073583 tempest-ServersAdminTestJSON-355073583-project-admin] Waiting for the task: (returnval){ [ 
1391.608107] env[63297]: value = "task-1697402" [ 1391.608107] env[63297]: _type = "Task" [ 1391.608107] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.616788] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697396, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.626027] env[63297]: DEBUG oslo_vmware.api [None req-3e167a78-0806-4cf3-a086-fb766bcce04d tempest-ServersAdminTestJSON-355073583 tempest-ServersAdminTestJSON-355073583-project-admin] Task: {'id': task-1697402, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.734651] env[63297]: DEBUG oslo_vmware.api [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697397, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.970678] env[63297]: DEBUG nova.compute.utils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1391.977105] env[63297]: DEBUG nova.compute.manager [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Not allocating networking since 'none' was specified. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1392.007730] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abf50b58-8d6b-4e88-8000-fe8d21ffd387 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "99cc8af3-5c18-4839-94db-996861e0c276" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.444s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.032722] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697398, 'name': ReconfigVM_Task, 'duration_secs': 0.948714} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.032722] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Reconfigured VM instance instance-00000037 to attach disk [datastore1] dc196e68-b725-43a1-9848-e84d1b138245/dc196e68-b725-43a1-9848-e84d1b138245.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1392.033071] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b2f803e-e0e2-4174-aef0-6815691eece1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.043156] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1392.043156] env[63297]: value = "task-1697403" [ 1392.043156] env[63297]: _type = "Task" [ 1392.043156] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.055126] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697403, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.116753] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697396, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.122817] env[63297]: DEBUG oslo_vmware.api [None req-3e167a78-0806-4cf3-a086-fb766bcce04d tempest-ServersAdminTestJSON-355073583 tempest-ServersAdminTestJSON-355073583-project-admin] Task: {'id': task-1697402, 'name': ReconfigVM_Task, 'duration_secs': 0.214621} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.123100] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3e167a78-0806-4cf3-a086-fb766bcce04d tempest-ServersAdminTestJSON-355073583 tempest-ServersAdminTestJSON-355073583-project-admin] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Reconfigured VM instance to set the machine id {{(pid=63297) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1392.234930] env[63297]: DEBUG oslo_vmware.api [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697397, 'name': ReconfigVM_Task, 'duration_secs': 1.076035} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.235581] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353871', 'volume_id': 'b0c8969b-afca-4185-99f2-ddbfd23d0d4e', 'name': 'volume-b0c8969b-afca-4185-99f2-ddbfd23d0d4e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b853b581-ea46-4455-8cdb-6ea2f31c22be', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0c8969b-afca-4185-99f2-ddbfd23d0d4e', 'serial': 'b0c8969b-afca-4185-99f2-ddbfd23d0d4e'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1392.382445] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.383224] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.383224] env[63297]: INFO nova.compute.manager [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Attaching volume 958a8520-a443-40be-8c9d-7f0dbc3abcfd to /dev/sdb [ 1392.478030] env[63297]: DEBUG nova.compute.manager [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1392.555676] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697403, 'name': Rename_Task, 'duration_secs': 0.236478} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.555885] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1392.556150] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a420b32-c6c4-4202-b4ff-a27d4f06489f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.565407] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1392.565407] env[63297]: value = "task-1697404" [ 1392.565407] env[63297]: _type = "Task" [ 1392.565407] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.580377] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697404, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.617724] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697396, 'name': CreateSnapshot_Task, 'duration_secs': 1.766063} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.618310] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1392.618881] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758f9361-3a6c-42e8-a697-b13a04ee0424 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.802186] env[63297]: DEBUG nova.objects.instance [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lazy-loading 'flavor' on Instance uuid b853b581-ea46-4455-8cdb-6ea2f31c22be {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1392.874990] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ecc76b-f148-4445-adb3-45f80b8a7353 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.888557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "89c9cd40-585e-4ae6-88b3-1a33a94c3b52" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.888852] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "89c9cd40-585e-4ae6-88b3-1a33a94c3b52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.894317] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9f142a-0dec-436b-ba1e-ff4b01305f38 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.915138] env[63297]: DEBUG nova.virt.block_device [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Updating existing volume attachment record: 535b54e6-7636-4c55-b171-b7217c87def2 {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1393.079208] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697404, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.082045] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "5e158880-81a6-4d35-b1df-6fd59ba4a8ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.082328] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "5e158880-81a6-4d35-b1df-6fd59ba4a8ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.082572] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "5e158880-81a6-4d35-b1df-6fd59ba4a8ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.082762] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "5e158880-81a6-4d35-b1df-6fd59ba4a8ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.082943] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "5e158880-81a6-4d35-b1df-6fd59ba4a8ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.086799] env[63297]: INFO nova.compute.manager [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Terminating instance [ 1393.088522] env[63297]: DEBUG nova.compute.manager [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1393.088709] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1393.090054] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1356746d-93b1-4053-815e-01ddf574d605 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.098140] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1393.100828] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83536c00-fc83-4d07-b81f-3f2cf1baddb9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.108325] env[63297]: DEBUG oslo_vmware.api [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1393.108325] env[63297]: value = "task-1697406" [ 1393.108325] env[63297]: _type = "Task" [ 1393.108325] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.116571] env[63297]: DEBUG oslo_vmware.api [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697406, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.118342] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf92e159-8446-4cd1-a5e4-76b5880e6f23 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.126068] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368cdc38-876b-4112-b3b3-a5f1f0bc17bb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.165876] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1393.166623] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-db95f978-e93e-4621-83f0-b3e26f0aea2b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.170350] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84a50cb-9d58-437f-b2ae-16987e4c2996 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.179291] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3696539-c8e5-44ba-9279-985c2e522434 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.188826] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1393.188826] env[63297]: value = "task-1697408" [ 1393.188826] env[63297]: _type = "Task" [ 1393.188826] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.198288] env[63297]: DEBUG nova.compute.provider_tree [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1393.206439] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697408, 'name': CloneVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.492470] env[63297]: DEBUG nova.compute.manager [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1393.524092] env[63297]: DEBUG nova.virt.hardware [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1393.525031] env[63297]: DEBUG nova.virt.hardware [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1393.525031] env[63297]: DEBUG nova.virt.hardware [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1393.525031] env[63297]: DEBUG nova.virt.hardware [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1393.525031] env[63297]: DEBUG nova.virt.hardware [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1393.525348] env[63297]: DEBUG nova.virt.hardware [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1393.526410] env[63297]: DEBUG nova.virt.hardware [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1393.526410] env[63297]: DEBUG 
nova.virt.hardware [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1393.526410] env[63297]: DEBUG nova.virt.hardware [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1393.526410] env[63297]: DEBUG nova.virt.hardware [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1393.526705] env[63297]: DEBUG nova.virt.hardware [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1393.528572] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0262ceca-5cb7-4eef-be27-86dfc512b730 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.537950] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094bff01-e1d3-41a7-9ddb-e5751d3a8dc0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.554136] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Instance VIF info [] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1393.560401] env[63297]: DEBUG oslo.service.loopingcall [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1393.560716] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1393.560935] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b9a90e8-d2e4-448c-b8dd-d12cb429ea34 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.584754] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697404, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.586773] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1393.586773] env[63297]: value = "task-1697411" [ 1393.586773] env[63297]: _type = "Task" [ 1393.586773] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.596281] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697411, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.619861] env[63297]: DEBUG oslo_vmware.api [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697406, 'name': PowerOffVM_Task, 'duration_secs': 0.356099} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.621272] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1393.621272] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1393.621553] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4e86456-57ee-4223-bffb-dae95601e0eb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.696547] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697408, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.722930] env[63297]: ERROR nova.scheduler.client.report [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [req-3abfde72-def1-4e7e-8259-cad7ffc1b605] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3abfde72-def1-4e7e-8259-cad7ffc1b605"}]} [ 1393.741556] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1393.742021] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1393.742315] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Deleting the datastore file [datastore1] 5e158880-81a6-4d35-b1df-6fd59ba4a8ff {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1393.742649] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ecb78fc-639d-4c19-aa48-627a2af42f95 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.746401] env[63297]: DEBUG nova.scheduler.client.report [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1393.751374] env[63297]: DEBUG oslo_vmware.api [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1393.751374] env[63297]: value = "task-1697413" [ 1393.751374] env[63297]: _type = "Task" [ 1393.751374] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.761399] env[63297]: DEBUG oslo_vmware.api [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697413, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.762771] env[63297]: DEBUG nova.scheduler.client.report [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1393.762997] env[63297]: DEBUG nova.compute.provider_tree [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1393.777565] env[63297]: DEBUG nova.scheduler.client.report [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1393.799858] env[63297]: DEBUG nova.scheduler.client.report [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1393.811843] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5142c4ab-6487-4321-85ed-84084129d896 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.798s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.086030] env[63297]: DEBUG oslo_vmware.api [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697404, 'name': PowerOnVM_Task, 'duration_secs': 1.059102} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.086155] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1394.086357] env[63297]: INFO nova.compute.manager [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Took 7.39 seconds to spawn the instance on the hypervisor. [ 1394.086535] env[63297]: DEBUG nova.compute.manager [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1394.087352] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02339455-5ec1-43ac-a3b3-a49e37e886e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.104636] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697411, 'name': CreateVM_Task, 'duration_secs': 0.335745} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.105044] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1394.105467] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.105686] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.105990] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1394.106247] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6c5f342-16a9-45cc-9dd1-9bdb6108b16f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.112344] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1394.112344] 
env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ded8f6-fca1-88b1-74be-a0b8fbaba441" [ 1394.112344] env[63297]: _type = "Task" [ 1394.112344] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.122023] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ded8f6-fca1-88b1-74be-a0b8fbaba441, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.198116] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697408, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.263225] env[63297]: DEBUG oslo_vmware.api [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697413, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.309483} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.263651] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1394.263859] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1394.264047] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1394.264221] env[63297]: INFO nova.compute.manager [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1394.264471] env[63297]: DEBUG oslo.service.loopingcall [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1394.264657] env[63297]: DEBUG nova.compute.manager [-] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1394.264764] env[63297]: DEBUG nova.network.neutron [-] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1394.348974] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81529ef6-014f-4fc9-8f96-6ad239436996 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.357463] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c5eecf-a026-404f-9382-cc184e5d01b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.389436] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1284d7e8-9998-45cc-b355-8e2026152429 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.397793] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28111573-a91a-408f-be6f-08eabb5807ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.413569] env[63297]: DEBUG nova.compute.provider_tree [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1394.617103] env[63297]: INFO nova.compute.manager [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Took 41.93 seconds to build instance. [ 1394.626340] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ded8f6-fca1-88b1-74be-a0b8fbaba441, 'name': SearchDatastore_Task, 'duration_secs': 0.024512} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.626642] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1394.626855] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1394.627111] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.627289] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.627536] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1394.627693] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b38ad61f-0255-45e4-849b-c5a11fcc758f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.681184] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1394.681328] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1394.682117] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd147633-98d0-42d8-8fc1-e5eb200e12b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.688340] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1394.688340] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5218186a-e4f3-5569-5f9d-b53ce0bc130a" [ 1394.688340] env[63297]: _type = "Task" [ 1394.688340] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.701510] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697408, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.704583] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5218186a-e4f3-5569-5f9d-b53ce0bc130a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.842429] env[63297]: DEBUG nova.compute.manager [req-cb6bda86-136e-4a87-83a3-0ff1624b4bb9 req-8ac02126-6e29-4032-8fc4-5058abbd1606 service nova] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Received event network-vif-deleted-2dd60b07-2f52-40a1-96a9-05d6dd307592 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1394.842691] env[63297]: INFO nova.compute.manager [req-cb6bda86-136e-4a87-83a3-0ff1624b4bb9 req-8ac02126-6e29-4032-8fc4-5058abbd1606 service nova] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Neutron deleted interface 2dd60b07-2f52-40a1-96a9-05d6dd307592; detaching it from the instance and deleting it from the info cache [ 1394.842899] env[63297]: DEBUG nova.network.neutron [req-cb6bda86-136e-4a87-83a3-0ff1624b4bb9 req-8ac02126-6e29-4032-8fc4-5058abbd1606 service nova] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.917150] env[63297]: DEBUG nova.scheduler.client.report [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1394.994058] env[63297]: DEBUG nova.network.neutron [-] [instance: 
5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.120188] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c2f94a4e-4dff-4812-8b2f-0294913d2068 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "dc196e68-b725-43a1-9848-e84d1b138245" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.161s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.201767] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697408, 'name': CloneVM_Task} progress is 95%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.205357] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5218186a-e4f3-5569-5f9d-b53ce0bc130a, 'name': SearchDatastore_Task, 'duration_secs': 0.017767} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.206156] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38d368a7-9335-432d-b0ee-54603abcdefe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.212261] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1395.212261] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b81dd8-8400-834b-b51b-662c95148781" [ 1395.212261] env[63297]: _type = "Task" [ 1395.212261] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.219911] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b81dd8-8400-834b-b51b-662c95148781, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.346484] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d301c4bd-07e2-496f-9c47-8b7de77fb3fb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.357042] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3557f5-bdb6-438c-8055-6251f6cf6f1a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.399267] env[63297]: DEBUG nova.compute.manager [req-cb6bda86-136e-4a87-83a3-0ff1624b4bb9 req-8ac02126-6e29-4032-8fc4-5058abbd1606 service nova] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Detach interface failed, port_id=2dd60b07-2f52-40a1-96a9-05d6dd307592, reason: Instance 5e158880-81a6-4d35-b1df-6fd59ba4a8ff could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1395.422563] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.959s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.423101] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1395.425743] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.947s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.425956] env[63297]: DEBUG nova.objects.instance [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lazy-loading 'resources' on Instance uuid 272180b9-e79b-4714-b28b-470937509f42 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1395.497088] env[63297]: INFO nova.compute.manager [-] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Took 1.23 seconds to deallocate network for instance. [ 1395.623314] env[63297]: DEBUG nova.compute.manager [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1395.651778] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Volume attach. 
Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1395.652060] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353881', 'volume_id': 'd18374ff-c296-4982-b5bc-1bbfdb0a6fca', 'name': 'volume-d18374ff-c296-4982-b5bc-1bbfdb0a6fca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '35c68986-51b5-43ba-a076-aca3c86d68bc', 'attached_at': '', 'detached_at': '', 'volume_id': 'd18374ff-c296-4982-b5bc-1bbfdb0a6fca', 'serial': 'd18374ff-c296-4982-b5bc-1bbfdb0a6fca'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1395.652910] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297b109b-a13b-45f2-817e-0ea2233ab3aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.671616] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c65d85-02b2-4b64-94a5-c61d182b1dbb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.701150] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] volume-d18374ff-c296-4982-b5bc-1bbfdb0a6fca/volume-d18374ff-c296-4982-b5bc-1bbfdb0a6fca.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1395.706490] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa54e288-16f2-489d-9cdf-6dd5d4cf6f90 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.724208] env[63297]: DEBUG oslo_concurrency.lockutils [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "5124f7fb-1293-4964-98c4-426ecfce7d10" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.724439] env[63297]: DEBUG oslo_concurrency.lockutils [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.729226] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697408, 'name': CloneVM_Task, 'duration_secs': 2.167554} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.732583] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Created linked-clone VM from snapshot [ 1395.733643] env[63297]: DEBUG oslo_vmware.api [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Waiting for the task: (returnval){ [ 1395.733643] env[63297]: value = "task-1697415" [ 1395.733643] env[63297]: _type = "Task" [ 1395.733643] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.734328] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d337531-09c3-4feb-8b9b-4973e7278db0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.747975] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b81dd8-8400-834b-b51b-662c95148781, 'name': SearchDatastore_Task, 'duration_secs': 0.01549} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.749146] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.749447] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] fb33135a-073d-4d80-9833-5b29afae1cc6/fb33135a-073d-4d80-9833-5b29afae1cc6.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1395.750047] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e725d26-78d0-41ff-b214-1c76e94da82b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.761894] env[63297]: DEBUG oslo_vmware.api [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Task: {'id': task-1697415, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.761894] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Uploading image f8358d93-1d74-4bbd-acb8-9b4db000374d {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1395.767509] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1395.767509] env[63297]: value = "task-1697416" [ 1395.767509] env[63297]: _type = "Task" [ 1395.767509] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.779155] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697416, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.787403] env[63297]: DEBUG oslo_vmware.rw_handles [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1395.787403] env[63297]: value = "vm-353885" [ 1395.787403] env[63297]: _type = "VirtualMachine" [ 1395.787403] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1395.787711] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1b903e6d-34f9-4ad6-af1d-46f13428d9d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.797097] env[63297]: DEBUG oslo_vmware.rw_handles [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lease: (returnval){ [ 1395.797097] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d0d82-a862-be91-de46-8230a499763b" [ 1395.797097] env[63297]: _type = "HttpNfcLease" [ 1395.797097] env[63297]: } obtained for exporting VM: (result){ [ 1395.797097] env[63297]: value = "vm-353885" [ 1395.797097] env[63297]: _type = "VirtualMachine" [ 1395.797097] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1395.797408] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the lease: (returnval){ [ 1395.797408] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d0d82-a862-be91-de46-8230a499763b" [ 1395.797408] env[63297]: _type = "HttpNfcLease" [ 1395.797408] env[63297]: } to be ready. 
{{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1395.805687] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1395.805687] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d0d82-a862-be91-de46-8230a499763b" [ 1395.805687] env[63297]: _type = "HttpNfcLease" [ 1395.805687] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1395.928863] env[63297]: DEBUG nova.compute.utils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1395.933265] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1395.933789] env[63297]: DEBUG nova.network.neutron [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1395.986870] env[63297]: INFO nova.compute.manager [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Rebuilding instance [ 1395.997236] env[63297]: DEBUG nova.policy [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eb119f2cb29446108035492d78b47b1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '790de76ab96a4e70a18619744dba096c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1396.008727] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.040895] env[63297]: DEBUG nova.compute.manager [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1396.042125] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849d9609-5afd-4330-8211-1381a732cd57 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.147152] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.230659] env[63297]: DEBUG nova.compute.utils [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1396.252788] env[63297]: DEBUG oslo_vmware.api [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Task: {'id': task-1697415, 'name': ReconfigVM_Task, 'duration_secs': 0.386786} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.255650] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Reconfigured VM instance instance-0000000f to attach disk [datastore1] volume-d18374ff-c296-4982-b5bc-1bbfdb0a6fca/volume-d18374ff-c296-4982-b5bc-1bbfdb0a6fca.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1396.263393] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9a28fab-f5b2-41ff-b7a1-7088770279cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.288462] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697416, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.292956] env[63297]: DEBUG oslo_vmware.api [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Waiting for the task: (returnval){ [ 1396.292956] env[63297]: value = "task-1697418" [ 1396.292956] env[63297]: _type = "Task" [ 1396.292956] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.306236] env[63297]: DEBUG oslo_vmware.api [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Task: {'id': task-1697418, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.311689] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1396.311689] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d0d82-a862-be91-de46-8230a499763b" [ 1396.311689] env[63297]: _type = "HttpNfcLease" [ 1396.311689] env[63297]: } is ready. 
{{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1396.312109] env[63297]: DEBUG oslo_vmware.rw_handles [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1396.312109] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d0d82-a862-be91-de46-8230a499763b" [ 1396.312109] env[63297]: _type = "HttpNfcLease" [ 1396.312109] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1396.312866] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3933bfdf-dca8-4945-bc80-906361163699 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.326667] env[63297]: DEBUG oslo_vmware.rw_handles [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526ffe12-1cc2-60e8-9e03-4e919fdf2a03/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1396.326899] env[63297]: DEBUG oslo_vmware.rw_handles [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526ffe12-1cc2-60e8-9e03-4e919fdf2a03/disk-0.vmdk for reading. {{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1396.329222] env[63297]: DEBUG nova.network.neutron [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Successfully created port: d0d9c69a-a4dc-4597-aeef-2c866176b393 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1396.435705] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1396.439299] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cdac7312-dc05-4748-9c92-9c57418bd025 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.524802] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48dea178-bf59-49fd-9267-8dcfaf4c7350 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.533031] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77255320-2afd-4b8e-adf7-dcf49ac4787d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.567122] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1396.567626] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea89a7da-ca41-4c30-911c-586e490adb68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.569784] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e965e381-b18d-4f1c-8dbf-c4a0b6dd92b3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.577917] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80499e5d-c928-4ea1-84c2-24033736b0ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.583694] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1396.583694] env[63297]: value = "task-1697419" [ 1396.583694] env[63297]: _type = "Task" [ 1396.583694] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.596619] env[63297]: DEBUG nova.compute.provider_tree [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.605120] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697419, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.734358] env[63297]: DEBUG oslo_concurrency.lockutils [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.790088] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697416, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.583021} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.790447] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] fb33135a-073d-4d80-9833-5b29afae1cc6/fb33135a-073d-4d80-9833-5b29afae1cc6.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1396.790742] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1396.791072] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-012a5ad8-0bf9-4cfc-9da2-95dacefb67b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.803129] env[63297]: DEBUG oslo_vmware.api [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Task: {'id': task-1697418, 'name': ReconfigVM_Task, 'duration_secs': 0.169296} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.804679] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353881', 'volume_id': 'd18374ff-c296-4982-b5bc-1bbfdb0a6fca', 'name': 'volume-d18374ff-c296-4982-b5bc-1bbfdb0a6fca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '35c68986-51b5-43ba-a076-aca3c86d68bc', 'attached_at': '', 'detached_at': '', 'volume_id': 'd18374ff-c296-4982-b5bc-1bbfdb0a6fca', 'serial': 'd18374ff-c296-4982-b5bc-1bbfdb0a6fca'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1396.806704] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1396.806704] env[63297]: value = "task-1697420" [ 1396.806704] env[63297]: _type = "Task" [ 1396.806704] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.816974] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697420, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.096651] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697419, 'name': PowerOffVM_Task, 'duration_secs': 0.235865} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.097029] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1397.097617] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1397.098295] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd90a88-5f79-4f06-80e1-17b974be085d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.101551] env[63297]: DEBUG nova.scheduler.client.report [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1397.109633] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1397.109933] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13f4e424-c733-474f-b951-d55323429316 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.196306] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1397.196611] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1397.196808] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleting the datastore file [datastore1] b14e8466-68ab-4705-a439-6db961a149b0 {{(pid=63297) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1397.197157] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7dd7b18d-0beb-4848-a919-33770e0e21f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.204913] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1397.204913] env[63297]: value = "task-1697422" [ 1397.204913] env[63297]: _type = "Task" [ 1397.204913] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.213964] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697422, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.320972] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697420, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126518} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.321698] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1397.322549] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8a4526-cb5d-4056-b45a-cc40b1c7184f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.348582] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] fb33135a-073d-4d80-9833-5b29afae1cc6/fb33135a-073d-4d80-9833-5b29afae1cc6.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1397.351234] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d38d4b2b-b321-4dc1-90e6-be142d5653aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.369520] env[63297]: DEBUG nova.objects.instance [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Lazy-loading 'flavor' on Instance uuid 35c68986-51b5-43ba-a076-aca3c86d68bc {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1397.379405] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: 
(returnval){ [ 1397.379405] env[63297]: value = "task-1697423" [ 1397.379405] env[63297]: _type = "Task" [ 1397.379405] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.391897] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697423, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.452540] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1397.466402] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "c05a249e-ab88-41f0-81f5-b644b3da5d2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.466991] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "c05a249e-ab88-41f0-81f5-b644b3da5d2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.471576] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Volume attach. 
Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1397.471939] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353884', 'volume_id': '958a8520-a443-40be-8c9d-7f0dbc3abcfd', 'name': 'volume-958a8520-a443-40be-8c9d-7f0dbc3abcfd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f', 'attached_at': '', 'detached_at': '', 'volume_id': '958a8520-a443-40be-8c9d-7f0dbc3abcfd', 'serial': '958a8520-a443-40be-8c9d-7f0dbc3abcfd'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1397.473148] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161d79a1-ee77-4390-9d9a-46b1104b7394 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.497419] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1397.497746] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1397.497956] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1397.498229] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1397.498442] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1397.498629] env[63297]: 
DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1397.498895] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1397.499140] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1397.499362] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1397.499591] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1397.499825] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1397.500742] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f70c14-e3d9-4e0e-afbf-84fc3ae141e0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.503966] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1b425a-dd0b-4c6b-a98b-ce2f370458ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.537541] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] volume-958a8520-a443-40be-8c9d-7f0dbc3abcfd/volume-958a8520-a443-40be-8c9d-7f0dbc3abcfd.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1397.538022] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2765a407-4995-41de-a578-7db5ffe2beba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.552618] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e0fb73-6e26-4b4e-8356-9ef679032928 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.570333] env[63297]: DEBUG oslo_vmware.api [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1397.570333] env[63297]: value = "task-1697424" [ 1397.570333] env[63297]: _type = "Task" [ 1397.570333] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.580530] env[63297]: DEBUG oslo_vmware.api [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697424, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.608048] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.182s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.612368] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.806s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.614432] env[63297]: INFO nova.compute.claims [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1397.649194] env[63297]: INFO nova.scheduler.client.report [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Deleted allocations for instance 272180b9-e79b-4714-b28b-470937509f42 [ 1397.716994] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697422, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.386126} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.717303] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1397.717607] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1397.717846] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1397.770801] env[63297]: DEBUG oslo_concurrency.lockutils [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "5124f7fb-1293-4964-98c4-426ecfce7d10" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.771293] env[63297]: DEBUG oslo_concurrency.lockutils [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.771365] env[63297]: INFO nova.compute.manager [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Attaching volume 1f16c2ed-7350-4649-be78-689cfc165090 to /dev/sdb [ 1397.824208] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc165463-0631-4037-ab70-a2a38f99af80 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.831563] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65acf0d4-d11a-43e9-8977-2bd11e2a811b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.852514] env[63297]: DEBUG nova.virt.block_device [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Updating existing volume attachment record: 8b00486c-9297-4dbb-b4eb-7f861fbe32fd {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1397.874757] env[63297]: DEBUG oslo_concurrency.lockutils [None req-50a837bd-7440-4077-b3c1-a69c8b02c8e3 tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Lock "35c68986-51b5-43ba-a076-aca3c86d68bc" 
"released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 6.857s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.891604] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697423, 'name': ReconfigVM_Task, 'duration_secs': 0.360752} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.893080] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Reconfigured VM instance instance-00000038 to attach disk [datastore1] fb33135a-073d-4d80-9833-5b29afae1cc6/fb33135a-073d-4d80-9833-5b29afae1cc6.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1397.893855] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-db69ac9a-3cb1-408b-b4a7-b33f037dcf30 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.904124] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1397.904124] env[63297]: value = "task-1697425" [ 1397.904124] env[63297]: _type = "Task" [ 1397.904124] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.916618] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697425, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.080797] env[63297]: DEBUG oslo_vmware.api [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697424, 'name': ReconfigVM_Task, 'duration_secs': 0.504451} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.081960] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Reconfigured VM instance instance-00000024 to attach disk [datastore1] volume-958a8520-a443-40be-8c9d-7f0dbc3abcfd/volume-958a8520-a443-40be-8c9d-7f0dbc3abcfd.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1398.088891] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88f69cf9-2f91-4eb2-bb49-98098c92d0e4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.117579] env[63297]: DEBUG oslo_vmware.api [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1398.117579] env[63297]: value = "task-1697427" [ 1398.117579] env[63297]: _type = "Task" [ 1398.117579] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.118908] env[63297]: DEBUG nova.compute.manager [req-3809803b-fdf5-4cbb-bd3c-7e60ca31f36d req-96089bf5-c478-4e0d-a903-13c0df9e8c84 service nova] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Received event network-vif-plugged-d0d9c69a-a4dc-4597-aeef-2c866176b393 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1398.119321] env[63297]: DEBUG oslo_concurrency.lockutils [req-3809803b-fdf5-4cbb-bd3c-7e60ca31f36d req-96089bf5-c478-4e0d-a903-13c0df9e8c84 service nova] Acquiring lock "c147f97d-7fae-4364-a9c0-04978df2450f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.119611] env[63297]: DEBUG oslo_concurrency.lockutils [req-3809803b-fdf5-4cbb-bd3c-7e60ca31f36d req-96089bf5-c478-4e0d-a903-13c0df9e8c84 service nova] Lock "c147f97d-7fae-4364-a9c0-04978df2450f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.119839] env[63297]: DEBUG oslo_concurrency.lockutils [req-3809803b-fdf5-4cbb-bd3c-7e60ca31f36d req-96089bf5-c478-4e0d-a903-13c0df9e8c84 service nova] Lock "c147f97d-7fae-4364-a9c0-04978df2450f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.120084] env[63297]: DEBUG nova.compute.manager [req-3809803b-fdf5-4cbb-bd3c-7e60ca31f36d req-96089bf5-c478-4e0d-a903-13c0df9e8c84 service nova] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] No waiting events found dispatching network-vif-plugged-d0d9c69a-a4dc-4597-aeef-2c866176b393 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1398.120300] env[63297]: WARNING nova.compute.manager [req-3809803b-fdf5-4cbb-bd3c-7e60ca31f36d req-96089bf5-c478-4e0d-a903-13c0df9e8c84 service nova] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Received unexpected event 
network-vif-plugged-d0d9c69a-a4dc-4597-aeef-2c866176b393 for instance with vm_state building and task_state spawning. [ 1398.139705] env[63297]: DEBUG oslo_vmware.api [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697427, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.163075] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1cd4ce0b-440d-4272-975b-07159421d259 tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "272180b9-e79b-4714-b28b-470937509f42" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.725s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.237568] env[63297]: DEBUG nova.network.neutron [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Successfully updated port: d0d9c69a-a4dc-4597-aeef-2c866176b393 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1398.418263] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697425, 'name': Rename_Task, 'duration_secs': 0.241987} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.418594] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1398.418860] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fecd7ed-cfc9-4f39-bda7-387e499a5ed9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.427516] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1398.427516] env[63297]: value = "task-1697430" [ 1398.427516] env[63297]: _type = "Task" [ 1398.427516] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.444837] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697430, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.637111] env[63297]: DEBUG oslo_vmware.api [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697427, 'name': ReconfigVM_Task, 'duration_secs': 0.163204} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.638024] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353884', 'volume_id': '958a8520-a443-40be-8c9d-7f0dbc3abcfd', 'name': 'volume-958a8520-a443-40be-8c9d-7f0dbc3abcfd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f', 'attached_at': '', 'detached_at': '', 'volume_id': '958a8520-a443-40be-8c9d-7f0dbc3abcfd', 'serial': '958a8520-a443-40be-8c9d-7f0dbc3abcfd'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1398.740222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "refresh_cache-c147f97d-7fae-4364-a9c0-04978df2450f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.740458] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired lock "refresh_cache-c147f97d-7fae-4364-a9c0-04978df2450f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.740642] env[63297]: DEBUG nova.network.neutron [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1398.765217] env[63297]: DEBUG nova.virt.hardware [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1398.765476] env[63297]: DEBUG nova.virt.hardware [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1398.766439] env[63297]: DEBUG nova.virt.hardware [None 
req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1398.766439] env[63297]: DEBUG nova.virt.hardware [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1398.766439] env[63297]: DEBUG nova.virt.hardware [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1398.766439] env[63297]: DEBUG nova.virt.hardware [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1398.766439] env[63297]: DEBUG nova.virt.hardware [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1398.766689] env[63297]: DEBUG nova.virt.hardware [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1398.767993] env[63297]: DEBUG nova.virt.hardware [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1398.767993] env[63297]: DEBUG nova.virt.hardware [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1398.767993] env[63297]: DEBUG nova.virt.hardware [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1398.769824] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298959bb-3237-435b-88a4-1845d1dcec4c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.779832] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9b4925-148d-4021-9615-583810d83383 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.796630] env[63297]: DEBUG 
nova.virt.vmwareapi.vmops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:73:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee4b2432-c393-4e50-ae0e-b5e12bad37db', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '603a207a-5c56-4835-a1be-961da01f6f07', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1398.804606] env[63297]: DEBUG oslo.service.loopingcall [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1398.807782] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1398.808250] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-565a314b-da73-4682-a7ab-520b1bb32049 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.829904] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Acquiring lock "35c68986-51b5-43ba-a076-aca3c86d68bc" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.830856] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Lock "35c68986-51b5-43ba-a076-aca3c86d68bc" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.835542] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1398.835542] env[63297]: value = "task-1697431" [ 1398.835542] env[63297]: _type = "Task" [ 1398.835542] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.849693] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697431, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.939502] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697430, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.256939] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aff40e8-76b9-4314-9c6f-ce2adeb7139d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.266532] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bade56f0-e74a-4a0a-937e-1375b53d2641 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.273497] env[63297]: DEBUG nova.network.neutron [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1399.300626] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af943aa6-b261-4acd-8fcd-fc5008291a19 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.309234] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a1fb64-90ad-42ce-8a2b-e5185fed232b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.327386] env[63297]: DEBUG nova.compute.provider_tree [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1399.333567] env[63297]: INFO nova.compute.manager [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Detaching volume d18374ff-c296-4982-b5bc-1bbfdb0a6fca [ 1399.348485] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697431, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.371152] env[63297]: INFO nova.virt.block_device [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Attempting to driver detach volume d18374ff-c296-4982-b5bc-1bbfdb0a6fca from mountpoint /dev/sdb [ 1399.371412] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Volume detach. 
Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1399.371598] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353881', 'volume_id': 'd18374ff-c296-4982-b5bc-1bbfdb0a6fca', 'name': 'volume-d18374ff-c296-4982-b5bc-1bbfdb0a6fca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '35c68986-51b5-43ba-a076-aca3c86d68bc', 'attached_at': '', 'detached_at': '', 'volume_id': 'd18374ff-c296-4982-b5bc-1bbfdb0a6fca', 'serial': 'd18374ff-c296-4982-b5bc-1bbfdb0a6fca'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1399.372640] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2d4da8-6956-46f2-a410-4107abae9635 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.401275] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d452d9e-9e7d-4adf-ae81-819812f5f139 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.412504] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8505531-0fc0-4180-9b57-054e88540ec6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.440078] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46aab001-10a3-4ce0-b0e3-99b435905ba6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.456561] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] The volume has not been displaced from its original location: [datastore1] volume-d18374ff-c296-4982-b5bc-1bbfdb0a6fca/volume-d18374ff-c296-4982-b5bc-1bbfdb0a6fca.vmdk. No consolidation needed. {{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1399.462992] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Reconfiguring VM instance instance-0000000f to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1399.466371] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1dd195e-815c-4e03-a254-5d266cf823cc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.480623] env[63297]: DEBUG oslo_vmware.api [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697430, 'name': PowerOnVM_Task, 'duration_secs': 0.531829} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.481216] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1399.481445] env[63297]: INFO nova.compute.manager [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Took 5.99 seconds to spawn the instance on the hypervisor. [ 1399.481636] env[63297]: DEBUG nova.compute.manager [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1399.482840] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28e1c2b-2a41-48e6-bc3f-4cef19e8d84f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.489307] env[63297]: DEBUG oslo_vmware.api [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Waiting for the task: (returnval){ [ 1399.489307] env[63297]: value = "task-1697432" [ 1399.489307] env[63297]: _type = "Task" [ 1399.489307] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.505332] env[63297]: DEBUG oslo_vmware.api [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Task: {'id': task-1697432, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.567130] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "b65e8c04-df55-491e-861c-8aa6def8c9be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.567415] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "b65e8c04-df55-491e-861c-8aa6def8c9be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.567625] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "b65e8c04-df55-491e-861c-8aa6def8c9be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.567854] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "b65e8c04-df55-491e-861c-8aa6def8c9be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.568040] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "b65e8c04-df55-491e-861c-8aa6def8c9be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.570247] env[63297]: INFO nova.compute.manager [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Terminating instance [ 1399.572271] env[63297]: DEBUG nova.compute.manager [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1399.572422] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1399.573251] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc04d85a-f490-42fa-bbd4-28c7347be394 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.577039] env[63297]: DEBUG nova.network.neutron [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Updating instance_info_cache with network_info: [{"id": "d0d9c69a-a4dc-4597-aeef-2c866176b393", "address": "fa:16:3e:22:da:0d", "network": {"id": "c64071c6-1f68-4a0f-bbee-4e5b755ec361", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1843002314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "790de76ab96a4e70a18619744dba096c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d9c69a-a4", "ovs_interfaceid": "d0d9c69a-a4dc-4597-aeef-2c866176b393", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.584767] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1399.585098] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6f6509f-0763-47ae-9267-48126462df00 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.595857] env[63297]: DEBUG oslo_vmware.api [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1399.595857] env[63297]: value = "task-1697433" [ 1399.595857] env[63297]: _type = "Task" [ 1399.595857] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.606703] env[63297]: DEBUG oslo_vmware.api [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697433, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.687026] env[63297]: DEBUG nova.objects.instance [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lazy-loading 'flavor' on Instance uuid 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1399.831328] env[63297]: DEBUG nova.scheduler.client.report [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1399.846580] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697431, 'name': CreateVM_Task, 'duration_secs': 0.615962} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.846745] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1399.847409] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.847561] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.847931] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1399.848223] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc946772-e6ad-49da-ab7a-636b0778b5d2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
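The records above repeatedly show the oslo_vmware task-polling pattern: a vCenter task handle (task-1697430, task-1697431, ...) is created, then wait_for_task drives _poll_task, which logs progress (0%, 14%, 99%, 100%) until the task reports "completed successfully" together with a duration_secs. The sketch below is a minimal, self-contained analogue of that loop for illustration only; it does not call the real oslo_vmware API, and the FakeTask class, its poll() method and the poll interval are assumptions made for the example, not Nova or oslo_vmware code.

```python
# Illustrative sketch: mimics the poll-until-complete behaviour that the
# wait_for_task/_poll_task log lines above record. FakeTask stands in for a
# vCenter task handle (e.g. PowerOnVM_Task); it is not part of oslo_vmware.
import time


class FakeTask:
    """Hypothetical stand-in for a vCenter task object."""

    def __init__(self, name, steps=4):
        self.name = name
        self._progress = 0
        self._step = 100 // steps

    def poll(self):
        """Return (state, progress); advances a little on every poll."""
        self._progress = min(100, self._progress + self._step)
        state = "success" if self._progress >= 100 else "running"
        return state, self._progress


def wait_for_task(task, interval=0.5):
    """Poll a task until it finishes, logging progress like _poll_task does."""
    started = time.time()
    while True:
        state, progress = task.poll()
        print(f"Task: {{'name': {task.name!r}}} progress is {progress}%.")
        if state == "success":
            duration = time.time() - started
            print(f"Task {task.name} completed successfully in {duration:.3f}s")
            return
        if state == "error":
            raise RuntimeError(f"Task {task.name} failed")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("PowerOnVM_Task"))
```

Running the snippet prints a progress line per poll and a final completion line, which is the same shape as the oslo_vmware.api entries in this log.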
[ 1399.854288] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1399.854288] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5288a69a-dafd-075b-bbce-27e1361ef291" [ 1399.854288] env[63297]: _type = "Task" [ 1399.854288] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.863131] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5288a69a-dafd-075b-bbce-27e1361ef291, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.008014] env[63297]: DEBUG oslo_vmware.api [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Task: {'id': task-1697432, 'name': ReconfigVM_Task, 'duration_secs': 0.265094} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.008014] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Reconfigured VM instance instance-0000000f to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1400.013608] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df623465-5b00-487b-8083-127dc90a8b6d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.024571] env[63297]: INFO nova.compute.manager [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Took 42.84 seconds to build instance. [ 1400.033569] env[63297]: DEBUG oslo_vmware.api [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Waiting for the task: (returnval){ [ 1400.033569] env[63297]: value = "task-1697434" [ 1400.033569] env[63297]: _type = "Task" [ 1400.033569] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.044060] env[63297]: DEBUG oslo_vmware.api [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Task: {'id': task-1697434, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.079803] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Releasing lock "refresh_cache-c147f97d-7fae-4364-a9c0-04978df2450f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.079930] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Instance network_info: |[{"id": "d0d9c69a-a4dc-4597-aeef-2c866176b393", "address": "fa:16:3e:22:da:0d", "network": {"id": "c64071c6-1f68-4a0f-bbee-4e5b755ec361", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1843002314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "790de76ab96a4e70a18619744dba096c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d9c69a-a4", "ovs_interfaceid": "d0d9c69a-a4dc-4597-aeef-2c866176b393", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1400.080489] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:da:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0d9c69a-a4dc-4597-aeef-2c866176b393', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1400.088256] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Creating folder: Project (790de76ab96a4e70a18619744dba096c). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1400.088626] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2740ba18-152f-404e-a2a9-0c2938332504 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.103015] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Created folder: Project (790de76ab96a4e70a18619744dba096c) in parent group-v353718. [ 1400.103240] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Creating folder: Instances. Parent ref: group-v353890. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1400.103940] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49bdafeb-6761-4469-8840-8cc46f59232e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.110045] env[63297]: DEBUG oslo_vmware.api [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697433, 'name': PowerOffVM_Task, 'duration_secs': 0.18215} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.110678] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1400.110911] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1400.111217] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ecea0e7f-31b1-45c0-b30b-dfd673cb5b45 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.122447] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Created folder: Instances in parent group-v353890. [ 1400.122554] env[63297]: DEBUG oslo.service.loopingcall [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1400.122818] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1400.122995] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a326de5a-2274-43fb-bd88-2f2d9df43bfb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.145108] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1400.145108] env[63297]: value = "task-1697438" [ 1400.145108] env[63297]: _type = "Task" [ 1400.145108] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.156799] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697438, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.164699] env[63297]: DEBUG nova.compute.manager [req-b050ef7d-a105-45a8-9402-f7f5edffd2a8 req-baacf19d-f0f2-4772-99fb-020bf8ff1b37 service nova] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Received event network-changed-d0d9c69a-a4dc-4597-aeef-2c866176b393 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1400.164875] env[63297]: DEBUG nova.compute.manager [req-b050ef7d-a105-45a8-9402-f7f5edffd2a8 req-baacf19d-f0f2-4772-99fb-020bf8ff1b37 service nova] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Refreshing instance network info cache due to event network-changed-d0d9c69a-a4dc-4597-aeef-2c866176b393. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1400.165144] env[63297]: DEBUG oslo_concurrency.lockutils [req-b050ef7d-a105-45a8-9402-f7f5edffd2a8 req-baacf19d-f0f2-4772-99fb-020bf8ff1b37 service nova] Acquiring lock "refresh_cache-c147f97d-7fae-4364-a9c0-04978df2450f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.165295] env[63297]: DEBUG oslo_concurrency.lockutils [req-b050ef7d-a105-45a8-9402-f7f5edffd2a8 req-baacf19d-f0f2-4772-99fb-020bf8ff1b37 service nova] Acquired lock "refresh_cache-c147f97d-7fae-4364-a9c0-04978df2450f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.165560] env[63297]: DEBUG nova.network.neutron [req-b050ef7d-a105-45a8-9402-f7f5edffd2a8 req-baacf19d-f0f2-4772-99fb-020bf8ff1b37 service nova] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Refreshing network info cache for port d0d9c69a-a4dc-4597-aeef-2c866176b393 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1400.193230] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3635e3ec-4a81-40b4-8603-d78c56121f32 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.810s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.291252] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1400.291509] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1400.291592] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Deleting the datastore file [datastore1] b65e8c04-df55-491e-861c-8aa6def8c9be {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1400.293039] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a488df92-ad61-48a8-bd50-b8076661af0a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.300201] env[63297]: DEBUG oslo_vmware.api [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for the task: (returnval){ [ 1400.300201] env[63297]: value = "task-1697439" [ 1400.300201] env[63297]: _type = "Task" [ 1400.300201] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.309321] env[63297]: DEBUG oslo_vmware.api [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697439, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.337524] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.725s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.338041] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1400.340827] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.038s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.341062] env[63297]: DEBUG nova.objects.instance [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Lazy-loading 'resources' on Instance uuid 41b1ce5d-a8ac-4b93-94a3-cf26367266d6 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1400.367987] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5288a69a-dafd-075b-bbce-27e1361ef291, 'name': SearchDatastore_Task, 'duration_secs': 0.034201} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.368547] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.368721] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1400.368982] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.369155] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.369379] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1400.369652] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49c16ad9-6bbc-4616-9de1-4337ffa6e4a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.390295] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1400.390612] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1400.396368] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba1b181f-ba5b-4ffe-9a67-2e7ab4b6ad92 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.406063] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1400.406063] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520bacf5-c82b-def5-4cc5-f87f9716b284" [ 1400.406063] env[63297]: _type = "Task" [ 1400.406063] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.421262] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520bacf5-c82b-def5-4cc5-f87f9716b284, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.527927] env[63297]: DEBUG oslo_concurrency.lockutils [None req-87331e00-af0e-4f7f-bc18-b75a6bbf4921 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "fb33135a-073d-4d80-9833-5b29afae1cc6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.279s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.547140] env[63297]: DEBUG oslo_vmware.api [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Task: {'id': task-1697434, 'name': ReconfigVM_Task, 'duration_secs': 0.177015} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.552064] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353881', 'volume_id': 'd18374ff-c296-4982-b5bc-1bbfdb0a6fca', 'name': 'volume-d18374ff-c296-4982-b5bc-1bbfdb0a6fca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '35c68986-51b5-43ba-a076-aca3c86d68bc', 'attached_at': '', 'detached_at': '', 'volume_id': 'd18374ff-c296-4982-b5bc-1bbfdb0a6fca', 'serial': 'd18374ff-c296-4982-b5bc-1bbfdb0a6fca'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1400.658643] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697438, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.814034] env[63297]: DEBUG oslo_vmware.api [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Task: {'id': task-1697439, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.370486} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.814428] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1400.814661] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1400.815046] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1400.815280] env[63297]: INFO nova.compute.manager [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1400.815556] env[63297]: DEBUG oslo.service.loopingcall [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1400.816154] env[63297]: DEBUG nova.compute.manager [-] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1400.816290] env[63297]: DEBUG nova.network.neutron [-] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1400.847795] env[63297]: DEBUG nova.compute.utils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1400.849394] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1400.849612] env[63297]: DEBUG nova.network.neutron [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1400.898391] env[63297]: DEBUG nova.policy [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eb119f2cb29446108035492d78b47b1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '790de76ab96a4e70a18619744dba096c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1400.920295] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520bacf5-c82b-def5-4cc5-f87f9716b284, 'name': SearchDatastore_Task, 'duration_secs': 0.01611} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.926825] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4d35750-962a-4a65-8f44-f4fe0444bb3c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.935524] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1400.935524] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52965290-6c16-119c-2de6-0574826e89a8" [ 1400.935524] env[63297]: _type = "Task" [ 1400.935524] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.948266] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52965290-6c16-119c-2de6-0574826e89a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.032600] env[63297]: DEBUG nova.compute.manager [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1401.117747] env[63297]: DEBUG nova.objects.instance [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Lazy-loading 'flavor' on Instance uuid 35c68986-51b5-43ba-a076-aca3c86d68bc {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1401.161718] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697438, 'name': CreateVM_Task, 'duration_secs': 0.606535} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.165281] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1401.166318] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.166406] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.166680] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1401.167524] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fbb4874-ae64-4d98-80f3-7b654ccd3ccb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.173063] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1401.173063] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52693c12-544c-16e1-c86d-f9576e811165" [ 1401.173063] env[63297]: _type = "Task" [ 1401.173063] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.181839] env[63297]: DEBUG nova.network.neutron [req-b050ef7d-a105-45a8-9402-f7f5edffd2a8 req-baacf19d-f0f2-4772-99fb-020bf8ff1b37 service nova] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Updated VIF entry in instance network info cache for port d0d9c69a-a4dc-4597-aeef-2c866176b393. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1401.182373] env[63297]: DEBUG nova.network.neutron [req-b050ef7d-a105-45a8-9402-f7f5edffd2a8 req-baacf19d-f0f2-4772-99fb-020bf8ff1b37 service nova] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Updating instance_info_cache with network_info: [{"id": "d0d9c69a-a4dc-4597-aeef-2c866176b393", "address": "fa:16:3e:22:da:0d", "network": {"id": "c64071c6-1f68-4a0f-bbee-4e5b755ec361", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1843002314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "790de76ab96a4e70a18619744dba096c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d9c69a-a4", "ovs_interfaceid": "d0d9c69a-a4dc-4597-aeef-2c866176b393", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.189993] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52693c12-544c-16e1-c86d-f9576e811165, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.289635] env[63297]: DEBUG nova.network.neutron [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Successfully created port: 4c61e911-9830-4ecf-b4e8-2b56f86048dc {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1401.352839] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1401.446711] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52965290-6c16-119c-2de6-0574826e89a8, 'name': SearchDatastore_Task, 'duration_secs': 0.01713} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.446999] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.447269] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b14e8466-68ab-4705-a439-6db961a149b0/b14e8466-68ab-4705-a439-6db961a149b0.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1401.450107] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c339c9fe-984c-4ae3-b040-82cdd80337ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.460590] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1401.460590] env[63297]: value = "task-1697441" [ 1401.460590] env[63297]: _type = "Task" [ 1401.460590] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.470923] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697441, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.472824] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b8a598-1dec-4d85-ba82-292ee3f51e91 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.481968] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70faaea-9fd7-40a1-a4aa-e7ae4c155d89 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.515291] env[63297]: INFO nova.compute.manager [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Rebuilding instance [ 1401.519200] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3879cc76-fb86-4ae8-a6cd-a96b1f2b403b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.528373] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43bbaaf-96f0-4f95-9e71-9f1cf107d7e6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.551726] env[63297]: DEBUG nova.compute.provider_tree [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1401.571564] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.591947] env[63297]: DEBUG nova.compute.manager [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1401.593135] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0993aad6-f79a-475c-816d-3a05f8f56e02 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.687199] env[63297]: DEBUG oslo_concurrency.lockutils [req-b050ef7d-a105-45a8-9402-f7f5edffd2a8 req-baacf19d-f0f2-4772-99fb-020bf8ff1b37 service nova] Releasing lock "refresh_cache-c147f97d-7fae-4364-a9c0-04978df2450f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.687824] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52693c12-544c-16e1-c86d-f9576e811165, 'name': SearchDatastore_Task, 'duration_secs': 0.024685} completed 
successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.688228] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.688723] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1401.689168] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.689573] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.690019] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1401.690457] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46feb530-0b31-4765-b0e5-801ee3001e07 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.703029] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1401.703029] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1401.703665] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c7a0873-7c70-4a67-8b36-c80ed8ea5691 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.710748] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1401.710748] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528a4488-4b55-986c-dbcb-016840ab4f7e" [ 1401.710748] env[63297]: _type = "Task" [ 1401.710748] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.721874] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528a4488-4b55-986c-dbcb-016840ab4f7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.804305] env[63297]: DEBUG nova.network.neutron [-] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.971969] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697441, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.057819] env[63297]: DEBUG nova.scheduler.client.report [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1402.105470] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1402.105550] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-811e83c5-397f-4861-8cef-a35d5a155725 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.116154] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1402.116154] env[63297]: value = "task-1697443" [ 1402.116154] env[63297]: _type = "Task" [ 1402.116154] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.127015] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdc60f7c-c8cd-4f05-be05-74b9a6331aee tempest-VolumesAssistedSnapshotsTest-287992536 tempest-VolumesAssistedSnapshotsTest-287992536-project-admin] Lock "35c68986-51b5-43ba-a076-aca3c86d68bc" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.297s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.129659] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697443, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.221311] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "96265295-6b0c-4803-bb89-6166c9d3fc7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.221628] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "96265295-6b0c-4803-bb89-6166c9d3fc7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.227419] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528a4488-4b55-986c-dbcb-016840ab4f7e, 'name': SearchDatastore_Task, 'duration_secs': 0.049246} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.228406] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ac44d23-7207-48bc-943c-4d8da13efe96 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.234345] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1402.234345] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526b8c16-1e3d-b2d9-94d6-feaf060f0206" [ 1402.234345] env[63297]: _type = "Task" [ 1402.234345] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.245134] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526b8c16-1e3d-b2d9-94d6-feaf060f0206, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.246267] env[63297]: DEBUG nova.compute.manager [req-2e8885ec-08b0-437f-b5c3-c81de3f77644 req-f96c6efb-6098-4d03-a404-9f95eb905cb6 service nova] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Received event network-vif-deleted-e73c7c5d-39d1-4f9a-a90b-695c08f4d2b3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1402.307931] env[63297]: INFO nova.compute.manager [-] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Took 1.49 seconds to deallocate network for instance. 
[ 1402.363007] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1402.388561] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1402.388847] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1402.389037] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1402.389203] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1402.389349] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1402.389491] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1402.389695] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1402.389849] env[63297]: DEBUG nova.virt.hardware [None 
req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1402.390081] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1402.390262] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1402.390432] env[63297]: DEBUG nova.virt.hardware [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1402.391375] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76e04b4-1f16-4e9d-926b-08b96875bce0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.400848] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd212caf-c7db-4748-bff0-9a14a3d5525b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.471153] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697441, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570349} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.471481] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b14e8466-68ab-4705-a439-6db961a149b0/b14e8466-68ab-4705-a439-6db961a149b0.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1402.471752] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1402.472044] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08fba82c-c43b-4d7a-a071-637b2944084d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.480547] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1402.480547] env[63297]: value = "task-1697444" [ 1402.480547] env[63297]: _type = "Task" [ 1402.480547] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.489335] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697444, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.563716] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.223s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.566094] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 33.877s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.588038] env[63297]: INFO nova.scheduler.client.report [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Deleted allocations for instance 41b1ce5d-a8ac-4b93-94a3-cf26367266d6 [ 1402.629671] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697443, 'name': PowerOffVM_Task, 'duration_secs': 0.137239} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.629955] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1402.630302] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1402.631412] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9fbd90-39f4-49fb-a6fb-e865303762b3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.641198] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1402.642182] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4ca1da1-8408-45ce-bedb-1f46cfd8b9b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.678401] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1402.678535] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1402.678717] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Deleting the datastore file [datastore1] fb33135a-073d-4d80-9833-5b29afae1cc6 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1402.678994] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdaedf4c-8c3d-439b-ba18-3d243c0f2b39 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.687082] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1402.687082] env[63297]: value = "task-1697446" [ 1402.687082] env[63297]: _type = "Task" [ 1402.687082] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.696965] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697446, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.747217] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526b8c16-1e3d-b2d9-94d6-feaf060f0206, 'name': SearchDatastore_Task, 'duration_secs': 0.023719} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.747910] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.747910] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c147f97d-7fae-4364-a9c0-04978df2450f/c147f97d-7fae-4364-a9c0-04978df2450f.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1402.747910] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4429585-99a7-4b08-8199-61c77f8c4460 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.757690] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1402.757690] env[63297]: value = "task-1697447" [ 1402.757690] env[63297]: _type = "Task" [ 1402.757690] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.767848] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697447, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.815112] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.880196] env[63297]: DEBUG nova.compute.manager [req-49f492bc-ad32-4408-a9ab-4dfdefb9de52 req-99c484e5-5650-47a9-9923-6a7ff9d3cebd service nova] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Received event network-vif-plugged-4c61e911-9830-4ecf-b4e8-2b56f86048dc {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1402.880434] env[63297]: DEBUG oslo_concurrency.lockutils [req-49f492bc-ad32-4408-a9ab-4dfdefb9de52 req-99c484e5-5650-47a9-9923-6a7ff9d3cebd service nova] Acquiring lock "c83c23d9-a8ec-4a87-8a8c-067e18d2615a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.880727] env[63297]: DEBUG oslo_concurrency.lockutils [req-49f492bc-ad32-4408-a9ab-4dfdefb9de52 req-99c484e5-5650-47a9-9923-6a7ff9d3cebd service nova] Lock "c83c23d9-a8ec-4a87-8a8c-067e18d2615a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.880918] env[63297]: DEBUG oslo_concurrency.lockutils [req-49f492bc-ad32-4408-a9ab-4dfdefb9de52 req-99c484e5-5650-47a9-9923-6a7ff9d3cebd service nova] Lock "c83c23d9-a8ec-4a87-8a8c-067e18d2615a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.881879] env[63297]: DEBUG nova.compute.manager [req-49f492bc-ad32-4408-a9ab-4dfdefb9de52 req-99c484e5-5650-47a9-9923-6a7ff9d3cebd service nova] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] No waiting events found dispatching network-vif-plugged-4c61e911-9830-4ecf-b4e8-2b56f86048dc {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1402.881879] env[63297]: WARNING nova.compute.manager [req-49f492bc-ad32-4408-a9ab-4dfdefb9de52 req-99c484e5-5650-47a9-9923-6a7ff9d3cebd service nova] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Received unexpected event network-vif-plugged-4c61e911-9830-4ecf-b4e8-2b56f86048dc for instance with vm_state building and task_state spawning. [ 1402.919456] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Volume attach. 
Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1402.919686] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353888', 'volume_id': '1f16c2ed-7350-4649-be78-689cfc165090', 'name': 'volume-1f16c2ed-7350-4649-be78-689cfc165090', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5124f7fb-1293-4964-98c4-426ecfce7d10', 'attached_at': '', 'detached_at': '', 'volume_id': '1f16c2ed-7350-4649-be78-689cfc165090', 'serial': '1f16c2ed-7350-4649-be78-689cfc165090'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1402.920711] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46a5499-c096-4400-a484-f7d80ad09ff7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.942146] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aef0256-6f53-4800-b65f-44d6409a6881 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.968788] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] volume-1f16c2ed-7350-4649-be78-689cfc165090/volume-1f16c2ed-7350-4649-be78-689cfc165090.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1402.969089] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-679bb6c4-167e-41ca-8021-a49c8fd1cfb6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.993070] env[63297]: DEBUG oslo_vmware.api [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1402.993070] env[63297]: value = "task-1697448" [ 1402.993070] env[63297]: _type = "Task" [ 1402.993070] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.996483] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697444, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081729} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.999780] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1403.000636] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63d7d77-1e70-4c91-93fc-7f28ae6ec2f9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.026057] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] b14e8466-68ab-4705-a439-6db961a149b0/b14e8466-68ab-4705-a439-6db961a149b0.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1403.026057] env[63297]: DEBUG nova.network.neutron [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Successfully updated port: 4c61e911-9830-4ecf-b4e8-2b56f86048dc {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1403.030478] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27dabe10-8624-488d-b1c5-3e1c65ebe1bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.046194] env[63297]: DEBUG oslo_vmware.api [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697448, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.054688] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1403.054688] env[63297]: value = "task-1697449" [ 1403.054688] env[63297]: _type = "Task" [ 1403.054688] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.071099] env[63297]: INFO nova.compute.claims [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1403.074992] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697449, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.096110] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d58713a7-cf85-4d98-9ec5-7fafe336378f tempest-ServerMetadataTestJSON-331141426 tempest-ServerMetadataTestJSON-331141426-project-member] Lock "41b1ce5d-a8ac-4b93-94a3-cf26367266d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.777s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.199732] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159803} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.200098] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1403.200542] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1403.200873] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1403.269637] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697447, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.512609] env[63297]: DEBUG oslo_vmware.api [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697448, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.548944] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "refresh_cache-c83c23d9-a8ec-4a87-8a8c-067e18d2615a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.548944] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired lock "refresh_cache-c83c23d9-a8ec-4a87-8a8c-067e18d2615a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.548944] env[63297]: DEBUG nova.network.neutron [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1403.573623] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697449, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.578730] env[63297]: INFO nova.compute.resource_tracker [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating resource usage from migration 0d35076e-6ba5-42c2-89c8-51053c88aa3a [ 1403.776206] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697447, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.787042} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.779258] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c147f97d-7fae-4364-a9c0-04978df2450f/c147f97d-7fae-4364-a9c0-04978df2450f.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1403.779509] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1403.780338] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0433db3-9c7b-4ddf-af9d-a4b9196928c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.788840] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1403.788840] env[63297]: value = "task-1697450" [ 1403.788840] env[63297]: _type = "Task" [ 1403.788840] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.804793] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697450, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.011783] env[63297]: DEBUG oslo_vmware.api [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697448, 'name': ReconfigVM_Task, 'duration_secs': 0.833742} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.011783] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Reconfigured VM instance instance-0000002d to attach disk [datastore1] volume-1f16c2ed-7350-4649-be78-689cfc165090/volume-1f16c2ed-7350-4649-be78-689cfc165090.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1404.019703] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60af33fa-faa5-403c-9a02-a5775ab8349f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.037288] env[63297]: DEBUG oslo_vmware.api [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1404.037288] env[63297]: value = "task-1697451" [ 1404.037288] env[63297]: _type = "Task" [ 1404.037288] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.050185] env[63297]: DEBUG oslo_vmware.api [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697451, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.066604] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697449, 'name': ReconfigVM_Task, 'duration_secs': 0.737406} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.067101] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Reconfigured VM instance instance-00000015 to attach disk [datastore1] b14e8466-68ab-4705-a439-6db961a149b0/b14e8466-68ab-4705-a439-6db961a149b0.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1404.067653] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a7d4825-5090-4e93-a34a-183c542526e0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.079129] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1404.079129] env[63297]: value = "task-1697452" [ 1404.079129] env[63297]: _type = "Task" [ 1404.079129] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.087069] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697452, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.100775] env[63297]: DEBUG nova.network.neutron [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1404.177372] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b987b8-46be-4d64-9fa7-052052b82bde {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.186812] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0942b46-faab-4c20-87bc-67d31cf24b3c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.237056] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f59e022-5596-41e1-9108-9b9fd25d7f9b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.243353] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016e2873-1845-44f7-947e-31da7f1be89e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.264294] env[63297]: DEBUG nova.compute.provider_tree [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1404.274182] env[63297]: DEBUG nova.virt.hardware [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1404.274424] env[63297]: DEBUG nova.virt.hardware [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 
tempest-ServerShowV247Test-1477613391-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1404.274581] env[63297]: DEBUG nova.virt.hardware [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1404.274761] env[63297]: DEBUG nova.virt.hardware [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1404.274904] env[63297]: DEBUG nova.virt.hardware [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1404.275430] env[63297]: DEBUG nova.virt.hardware [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1404.275703] env[63297]: DEBUG nova.virt.hardware [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1404.275879] env[63297]: DEBUG nova.virt.hardware [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1404.276091] env[63297]: DEBUG nova.virt.hardware [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1404.276267] env[63297]: DEBUG nova.virt.hardware [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1404.276446] env[63297]: DEBUG nova.virt.hardware [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1404.277331] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288604c8-65d0-4a37-93d1-d760bc394bb8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.288891] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9ce68003-90cd-4436-bfd6-5aa5b56e164a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.310675] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697450, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076975} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.319203] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1404.319454] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Instance VIF info [] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1404.325348] env[63297]: DEBUG oslo.service.loopingcall [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1404.329155] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf6455c-f9c3-4ff1-8545-24ccec3c3e78 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.329155] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1404.329392] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8686d4f-2397-427e-aeaa-273c161c63dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.364203] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] c147f97d-7fae-4364-a9c0-04978df2450f/c147f97d-7fae-4364-a9c0-04978df2450f.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1404.366096] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec1f6d1e-872c-43f9-8b83-956e784b570c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.380640] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1404.380640] env[63297]: value = "task-1697453" [ 1404.380640] env[63297]: _type = "Task" [ 1404.380640] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.392269] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697453, 'name': CreateVM_Task} progress is 15%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.396393] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1404.396393] env[63297]: value = "task-1697454" [ 1404.396393] env[63297]: _type = "Task" [ 1404.396393] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.405357] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697454, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.481194] env[63297]: DEBUG nova.network.neutron [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Updating instance_info_cache with network_info: [{"id": "4c61e911-9830-4ecf-b4e8-2b56f86048dc", "address": "fa:16:3e:90:61:d1", "network": {"id": "c64071c6-1f68-4a0f-bbee-4e5b755ec361", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1843002314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "790de76ab96a4e70a18619744dba096c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c61e911-98", "ovs_interfaceid": "4c61e911-9830-4ecf-b4e8-2b56f86048dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.551601] env[63297]: DEBUG oslo_vmware.api [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697451, 'name': ReconfigVM_Task, 'duration_secs': 0.178397} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.551601] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353888', 'volume_id': '1f16c2ed-7350-4649-be78-689cfc165090', 'name': 'volume-1f16c2ed-7350-4649-be78-689cfc165090', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5124f7fb-1293-4964-98c4-426ecfce7d10', 'attached_at': '', 'detached_at': '', 'volume_id': '1f16c2ed-7350-4649-be78-689cfc165090', 'serial': '1f16c2ed-7350-4649-be78-689cfc165090'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1404.589309] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697452, 'name': Rename_Task, 'duration_secs': 0.269659} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.589608] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1404.589915] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5dfdc80-7d21-49c1-a1a4-fb0aef033497 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.602300] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1404.602300] env[63297]: value = "task-1697455" [ 1404.602300] env[63297]: _type = "Task" [ 1404.602300] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.612843] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697455, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.766551] env[63297]: DEBUG nova.scheduler.client.report [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1404.892376] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697453, 'name': CreateVM_Task, 'duration_secs': 0.302477} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.892529] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1404.893030] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.893255] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.893648] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1404.893940] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1c8b498-8780-4e4c-bdf1-83295eb3b20e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.904331] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1404.904331] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527ec618-7dbb-b70a-e5e5-021ff716082d" [ 1404.904331] env[63297]: _type = "Task" [ 1404.904331] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.911722] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697454, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.918311] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527ec618-7dbb-b70a-e5e5-021ff716082d, 'name': SearchDatastore_Task, 'duration_secs': 0.014409} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.918811] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.919103] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1404.919348] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.919493] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.919685] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1404.920368] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c9516bd-a44f-4670-94ff-c7f6fcc29216 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.933809] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1404.934061] 
env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1404.934831] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0f2374b-a96a-4283-8041-010813a7a076 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.941557] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1404.941557] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52296bab-b83a-00b4-d774-2923c8db65aa" [ 1404.941557] env[63297]: _type = "Task" [ 1404.941557] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.950791] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52296bab-b83a-00b4-d774-2923c8db65aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.967683] env[63297]: DEBUG nova.compute.manager [req-73311d65-a855-46bc-afa6-f7345ac48cca req-7ddcc721-4c33-4e4d-b88b-039a4e3a2242 service nova] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Received event network-changed-4c61e911-9830-4ecf-b4e8-2b56f86048dc {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1404.967880] env[63297]: DEBUG nova.compute.manager [req-73311d65-a855-46bc-afa6-f7345ac48cca req-7ddcc721-4c33-4e4d-b88b-039a4e3a2242 service nova] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Refreshing instance network info cache due to event network-changed-4c61e911-9830-4ecf-b4e8-2b56f86048dc. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1404.968295] env[63297]: DEBUG oslo_concurrency.lockutils [req-73311d65-a855-46bc-afa6-f7345ac48cca req-7ddcc721-4c33-4e4d-b88b-039a4e3a2242 service nova] Acquiring lock "refresh_cache-c83c23d9-a8ec-4a87-8a8c-067e18d2615a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.983808] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Releasing lock "refresh_cache-c83c23d9-a8ec-4a87-8a8c-067e18d2615a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.984229] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Instance network_info: |[{"id": "4c61e911-9830-4ecf-b4e8-2b56f86048dc", "address": "fa:16:3e:90:61:d1", "network": {"id": "c64071c6-1f68-4a0f-bbee-4e5b755ec361", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1843002314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "790de76ab96a4e70a18619744dba096c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c61e911-98", "ovs_interfaceid": "4c61e911-9830-4ecf-b4e8-2b56f86048dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1404.984544] env[63297]: DEBUG oslo_concurrency.lockutils [req-73311d65-a855-46bc-afa6-f7345ac48cca req-7ddcc721-4c33-4e4d-b88b-039a4e3a2242 service nova] Acquired lock "refresh_cache-c83c23d9-a8ec-4a87-8a8c-067e18d2615a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.984726] env[63297]: DEBUG nova.network.neutron [req-73311d65-a855-46bc-afa6-f7345ac48cca req-7ddcc721-4c33-4e4d-b88b-039a4e3a2242 service nova] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Refreshing network info cache for port 4c61e911-9830-4ecf-b4e8-2b56f86048dc {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1404.986212] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:61:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'4c61e911-9830-4ecf-b4e8-2b56f86048dc', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1404.994075] env[63297]: DEBUG oslo.service.loopingcall [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1404.997509] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1404.997995] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86ea6f3d-405d-40de-b27f-32d477de0e6f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.020768] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1405.020768] env[63297]: value = "task-1697456" [ 1405.020768] env[63297]: _type = "Task" [ 1405.020768] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.030245] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697456, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.119302] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697455, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.271728] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.706s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.271941] env[63297]: INFO nova.compute.manager [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Migrating [ 1405.279099] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.251s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.280722] env[63297]: INFO nova.compute.claims [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1405.407835] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697454, 'name': ReconfigVM_Task, 'duration_secs': 0.576441} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.412568] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Reconfigured VM instance instance-00000039 to attach disk [datastore1] c147f97d-7fae-4364-a9c0-04978df2450f/c147f97d-7fae-4364-a9c0-04978df2450f.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1405.414279] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-721d9d49-7def-4bdb-b837-00db9399e882 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.425471] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1405.425471] env[63297]: value = "task-1697457" [ 1405.425471] env[63297]: _type = "Task" [ 1405.425471] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.437027] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697457, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.453535] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52296bab-b83a-00b4-d774-2923c8db65aa, 'name': SearchDatastore_Task, 'duration_secs': 0.022917} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.454660] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-314c68b4-1bbc-487d-9f3c-a622824d595f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.461308] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1405.461308] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529b7f78-2d19-1dba-a548-90081c91b925" [ 1405.461308] env[63297]: _type = "Task" [ 1405.461308] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.473736] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529b7f78-2d19-1dba-a548-90081c91b925, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.532179] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697456, 'name': CreateVM_Task, 'duration_secs': 0.435695} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.533057] env[63297]: DEBUG nova.network.neutron [req-73311d65-a855-46bc-afa6-f7345ac48cca req-7ddcc721-4c33-4e4d-b88b-039a4e3a2242 service nova] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Updated VIF entry in instance network info cache for port 4c61e911-9830-4ecf-b4e8-2b56f86048dc. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1405.533416] env[63297]: DEBUG nova.network.neutron [req-73311d65-a855-46bc-afa6-f7345ac48cca req-7ddcc721-4c33-4e4d-b88b-039a4e3a2242 service nova] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Updating instance_info_cache with network_info: [{"id": "4c61e911-9830-4ecf-b4e8-2b56f86048dc", "address": "fa:16:3e:90:61:d1", "network": {"id": "c64071c6-1f68-4a0f-bbee-4e5b755ec361", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1843002314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "790de76ab96a4e70a18619744dba096c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c61e911-98", "ovs_interfaceid": "4c61e911-9830-4ecf-b4e8-2b56f86048dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.535851] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1405.535851] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.535851] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.535851] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1405.536332] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-677233b8-2e42-4970-8d63-2530c75c66cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.542502] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: 
(returnval){ [ 1405.542502] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cc089e-4a88-21f3-d1eb-1c2fd9c2838c" [ 1405.542502] env[63297]: _type = "Task" [ 1405.542502] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.552701] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cc089e-4a88-21f3-d1eb-1c2fd9c2838c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.589903] env[63297]: DEBUG oslo_concurrency.lockutils [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Acquiring lock "35c68986-51b5-43ba-a076-aca3c86d68bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.590193] env[63297]: DEBUG oslo_concurrency.lockutils [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Lock "35c68986-51b5-43ba-a076-aca3c86d68bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.590404] env[63297]: DEBUG oslo_concurrency.lockutils [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Acquiring lock "35c68986-51b5-43ba-a076-aca3c86d68bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.590585] env[63297]: DEBUG oslo_concurrency.lockutils [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Lock "35c68986-51b5-43ba-a076-aca3c86d68bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.590797] env[63297]: DEBUG oslo_concurrency.lockutils [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Lock "35c68986-51b5-43ba-a076-aca3c86d68bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.592931] env[63297]: INFO nova.compute.manager [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Terminating instance [ 1405.594845] env[63297]: DEBUG nova.compute.manager [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 
tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1405.595066] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1405.595916] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101084cf-6be9-4f3e-978f-95a724889dc2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.604699] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1405.604934] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f016d878-0aa0-4904-8862-90b5b1f30c26 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.608479] env[63297]: DEBUG nova.objects.instance [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lazy-loading 'flavor' on Instance uuid 5124f7fb-1293-4964-98c4-426ecfce7d10 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1405.614294] env[63297]: DEBUG oslo_vmware.api [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Waiting for the task: (returnval){ [ 1405.614294] env[63297]: value = "task-1697458" [ 1405.614294] env[63297]: _type = "Task" [ 1405.614294] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.623141] env[63297]: DEBUG oslo_vmware.api [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697455, 'name': PowerOnVM_Task, 'duration_secs': 0.74689} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.623870] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1405.624158] env[63297]: DEBUG nova.compute.manager [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1405.624971] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1036a4c6-7234-4f53-a47d-ff1e4c4d0a11 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.633340] env[63297]: DEBUG oslo_vmware.api [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1697458, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.764033] env[63297]: DEBUG oslo_vmware.rw_handles [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526ffe12-1cc2-60e8-9e03-4e919fdf2a03/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1405.764303] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42226517-d8bd-4b59-a2fc-6e6a6dc9c1e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.774374] env[63297]: DEBUG oslo_vmware.rw_handles [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526ffe12-1cc2-60e8-9e03-4e919fdf2a03/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1405.774537] env[63297]: ERROR oslo_vmware.rw_handles [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526ffe12-1cc2-60e8-9e03-4e919fdf2a03/disk-0.vmdk due to incomplete transfer. [ 1405.774797] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d3dfc656-2012-4129-9d8b-c645416d2db3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.786394] env[63297]: DEBUG oslo_vmware.rw_handles [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526ffe12-1cc2-60e8-9e03-4e919fdf2a03/disk-0.vmdk. 
{{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1405.786595] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Uploaded image f8358d93-1d74-4bbd-acb8-9b4db000374d to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1405.789197] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1405.790026] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4d335e21-5293-4eaa-8f60-b45e502225a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.791837] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.791997] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.792862] env[63297]: DEBUG nova.network.neutron [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1405.801599] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1405.801599] env[63297]: value = "task-1697459" [ 1405.801599] env[63297]: _type = "Task" [ 1405.801599] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.812379] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697459, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.936673] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697457, 'name': Rename_Task, 'duration_secs': 0.317395} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.936933] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1405.937195] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0b4e04a-0753-4779-998a-793baca826cc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.945097] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1405.945097] env[63297]: value = "task-1697460" [ 1405.945097] env[63297]: _type = "Task" [ 1405.945097] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.954758] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697460, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.971548] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529b7f78-2d19-1dba-a548-90081c91b925, 'name': SearchDatastore_Task, 'duration_secs': 0.014039} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.971928] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.972151] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] fb33135a-073d-4d80-9833-5b29afae1cc6/fb33135a-073d-4d80-9833-5b29afae1cc6.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1405.972422] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bed55b1f-6287-4505-8300-5e61556aafc0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.982025] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1405.982025] env[63297]: value = "task-1697461" [ 1405.982025] env[63297]: _type = "Task" [ 1405.982025] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.991882] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697461, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.037356] env[63297]: DEBUG oslo_concurrency.lockutils [req-73311d65-a855-46bc-afa6-f7345ac48cca req-7ddcc721-4c33-4e4d-b88b-039a4e3a2242 service nova] Releasing lock "refresh_cache-c83c23d9-a8ec-4a87-8a8c-067e18d2615a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.054680] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cc089e-4a88-21f3-d1eb-1c2fd9c2838c, 'name': SearchDatastore_Task, 'duration_secs': 0.025215} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.054917] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.055204] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1406.055451] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.055597] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.055782] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1406.056074] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19028552-2722-4a7a-836f-be3fd47b4073 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.076584] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1406.076796] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1406.077698] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e8bff2c-0490-42a7-93af-670803a2eb34 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.084185] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1406.084185] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d4c235-9200-a09b-a48e-15ba08932343" [ 1406.084185] env[63297]: _type = "Task" [ 1406.084185] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.093072] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d4c235-9200-a09b-a48e-15ba08932343, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.120941] env[63297]: DEBUG oslo_concurrency.lockutils [None req-62b47c83-e94d-450b-bdfa-42e46e0087de tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.350s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.130716] env[63297]: DEBUG oslo_vmware.api [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1697458, 'name': PowerOffVM_Task, 'duration_secs': 0.228264} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.131089] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1406.131789] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1406.131789] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f9681ad-b106-402c-80f7-ccdb6f189cc7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.152227] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.272168] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39c7c0ba-bde9-4bd6-aed5-bfbbba6a7a28 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "5124f7fb-1293-4964-98c4-426ecfce7d10" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.272527] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39c7c0ba-bde9-4bd6-aed5-bfbbba6a7a28 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.272708] env[63297]: DEBUG nova.compute.manager [None req-39c7c0ba-bde9-4bd6-aed5-bfbbba6a7a28 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1406.274050] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15860a9-0316-4398-b538-d7c7e17b5b7d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.283093] env[63297]: DEBUG nova.compute.manager [None req-39c7c0ba-bde9-4bd6-aed5-bfbbba6a7a28 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63297) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1406.284382] env[63297]: DEBUG nova.objects.instance [None 
req-39c7c0ba-bde9-4bd6-aed5-bfbbba6a7a28 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lazy-loading 'flavor' on Instance uuid 5124f7fb-1293-4964-98c4-426ecfce7d10 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1406.296746] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1406.296932] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1406.297213] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Deleting the datastore file [datastore1] 35c68986-51b5-43ba-a076-aca3c86d68bc {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1406.300321] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0de80417-d0b4-4abe-85c7-91e8057e1a0f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.319154] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697459, 'name': Destroy_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.319154] env[63297]: DEBUG oslo_vmware.api [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Waiting for the task: (returnval){ [ 1406.319154] env[63297]: value = "task-1697463" [ 1406.319154] env[63297]: _type = "Task" [ 1406.319154] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.331290] env[63297]: DEBUG oslo_vmware.api [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1697463, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.463764] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697460, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.501092] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697461, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.550184] env[63297]: DEBUG nova.network.neutron [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance_info_cache with network_info: [{"id": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "address": "fa:16:3e:21:7c:1d", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9a7e1f4-1a", "ovs_interfaceid": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.597649] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d4c235-9200-a09b-a48e-15ba08932343, 'name': SearchDatastore_Task, 'duration_secs': 0.023387} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.600987] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-980e6289-8d44-4406-9fe8-91a07ec166f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.609489] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1406.609489] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d658b-7a88-f850-4bdc-02dd2bc6e39c" [ 1406.609489] env[63297]: _type = "Task" [ 1406.609489] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.620063] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d658b-7a88-f850-4bdc-02dd2bc6e39c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.792794] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c7c0ba-bde9-4bd6-aed5-bfbbba6a7a28 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1406.793136] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-228d2d14-02be-4b5b-9452-b5decb10bcf3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.799071] env[63297]: INFO nova.compute.manager [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Rebuilding instance [ 1406.802820] env[63297]: DEBUG oslo_vmware.api [None req-39c7c0ba-bde9-4bd6-aed5-bfbbba6a7a28 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1406.802820] env[63297]: value = "task-1697464" [ 1406.802820] env[63297]: _type = "Task" [ 1406.802820] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.818740] env[63297]: DEBUG oslo_vmware.api [None req-39c7c0ba-bde9-4bd6-aed5-bfbbba6a7a28 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697464, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.827744] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697459, 'name': Destroy_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.840202] env[63297]: DEBUG oslo_vmware.api [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1697463, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.853404] env[63297]: DEBUG nova.compute.manager [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1406.853404] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37bcbce-856d-4796-978c-c45a4121f70d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.916881] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1953c416-d78e-4926-8ae1-22d075e0c2a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.925773] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72dfbeb1-8127-4093-b12e-8085d94cdb20 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.956627] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.956888] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.957111] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.957293] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.957462] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.962597] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac99d61-3aec-4dd2-a1b0-13857556f67e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.966992] env[63297]: INFO nova.compute.manager [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Terminating instance [ 1406.968112] env[63297]: DEBUG nova.compute.manager [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1406.968334] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1406.969152] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3909fe5b-e403-44c4-b24a-23d46a1f9b03 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.978202] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697460, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.979798] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633c4ecc-0cf5-4bbd-8a47-3bd4404c7212 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.985772] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1406.988889] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7150547-c324-4fec-b335-a13a27a8f441 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.999333] env[63297]: DEBUG nova.compute.provider_tree [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1407.003905] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 
tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697461, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80902} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.004412] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] fb33135a-073d-4d80-9833-5b29afae1cc6/fb33135a-073d-4d80-9833-5b29afae1cc6.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1407.004635] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1407.004888] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21d47b88-f7e0-493a-ba32-0e8366bb4edf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.012697] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1407.012697] env[63297]: value = "task-1697466" [ 1407.012697] env[63297]: _type = "Task" [ 1407.012697] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.023425] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697466, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.056016] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.062610] env[63297]: INFO nova.compute.manager [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Rebuilding instance [ 1407.067756] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1407.068531] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1407.068531] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleting the datastore file [datastore1] 6ce88b93-aa42-4f34-81fa-6c09c23ace81 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1407.068670] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e0762bd-cb2b-42e8-8d77-f03cad773ad7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.078040] env[63297]: DEBUG oslo_vmware.api [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1407.078040] env[63297]: value = "task-1697467" [ 1407.078040] env[63297]: _type = "Task" [ 1407.078040] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.093207] env[63297]: DEBUG oslo_vmware.api [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697467, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.117939] env[63297]: DEBUG nova.compute.manager [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1407.119903] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7d6065-f0f2-4d32-b26e-21f737531576 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.126459] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d658b-7a88-f850-4bdc-02dd2bc6e39c, 'name': SearchDatastore_Task, 'duration_secs': 0.057603} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.127161] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.127438] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c83c23d9-a8ec-4a87-8a8c-067e18d2615a/c83c23d9-a8ec-4a87-8a8c-067e18d2615a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1407.127712] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1bc19aa-8c4a-42c4-af59-05cc637c04ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.140618] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1407.140618] env[63297]: value = "task-1697468" [ 1407.140618] env[63297]: _type = "Task" [ 1407.140618] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.151294] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697468, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.323586] env[63297]: DEBUG oslo_vmware.api [None req-39c7c0ba-bde9-4bd6-aed5-bfbbba6a7a28 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697464, 'name': PowerOffVM_Task, 'duration_secs': 0.407869} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.323586] env[63297]: DEBUG oslo_vmware.api [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697459, 'name': Destroy_Task, 'duration_secs': 1.033058} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.323817] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c7c0ba-bde9-4bd6-aed5-bfbbba6a7a28 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1407.323915] env[63297]: DEBUG nova.compute.manager [None req-39c7c0ba-bde9-4bd6-aed5-bfbbba6a7a28 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1407.324193] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Destroyed the VM [ 1407.324450] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1407.325222] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314159ba-f291-499d-a6ec-4d594f1f3949 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.328191] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-20ba50a3-31e2-4ff8-bd8c-f9e32a7af07d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.335861] env[63297]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
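The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOffVM_Task and Destroy_Task records above all follow the same wait_for_task pattern: the driver submits the vCenter call, then the poller re-reads the task object, logging "progress is N%" until the task reports success ("completed successfully", with duration_secs) or an error. A minimal plain-Python sketch of that polling loop (illustrative only, not the oslo.vmware implementation; get_task_info is an assumed callable returning a dict with 'state', 'progress', 'duration_secs' and 'error' keys):

import time

class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""

def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
    # Poll until the task reports 'success' or 'error', mirroring the
    # "progress is N%" / "completed successfully" lines in the log above.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)  # assumed helper, e.g. a vSphere TaskInfo read
        state = info.get("state")
        if state == "success":
            print(f"Task {task_id} completed successfully "
                  f"(duration_secs={info.get('duration_secs')})")
            return info
        if state == "error":
            raise TaskFailed(info.get("error", "unknown fault"))
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

In the real driver the error branch is routed through exceptions.translate_fault(task_info.error), which is how the ManagedObjectNotFound and CannotDeleteFileException entries further down in this log are produced.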
[ 1407.336036] env[63297]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=63297) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1407.336441] env[63297]: DEBUG nova.compute.utils [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Cleaning up image f8358d93-1d74-4bbd-acb8-9b4db000374d {{(pid=63297) delete_image /opt/stack/nova/nova/compute/utils.py:1322}} [ 1407.343739] env[63297]: DEBUG oslo_vmware.api [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Task: {'id': task-1697463, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.632431} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.345856] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1407.346040] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1407.346213] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1407.346376] env[63297]: INFO nova.compute.manager [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Took 1.75 seconds to destroy the instance on the hypervisor. [ 1407.346601] env[63297]: DEBUG oslo.service.loopingcall [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1407.346977] env[63297]: DEBUG nova.compute.manager [-] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1407.347092] env[63297]: DEBUG nova.network.neutron [-] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1407.368426] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1407.368788] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9a4bee2-14df-4311-a245-2f97c683b34e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.377552] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1407.377552] env[63297]: value = "task-1697469" [ 1407.377552] env[63297]: _type = "Task" [ 1407.377552] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.387435] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697469, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.467591] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697460, 'name': PowerOnVM_Task, 'duration_secs': 1.302236} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.468619] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1407.468619] env[63297]: INFO nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Took 10.02 seconds to spawn the instance on the hypervisor. 
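The interleaved records for instances 35c68986-51b5-43ba-a076-aca3c86d68bc and 6ce88b93-aa42-4f34-81fa-6c09c23ace81 trace the driver's destroy sequence: power off the VM, unregister it, delete its directory from the datastore, then hand network deallocation to a looping call; as the CannotDeleteFileException traceback further down shows, a failed file delete is only logged as a warning and teardown continues. A condensed sketch of that ordering (the callables are hypothetical stand-ins for the vmops/ds_util/neutron helpers named in the log, not Nova's actual signatures):

import logging

LOG = logging.getLogger(__name__)

def destroy_instance(session, instance, power_off_vm, unregister_vm,
                     delete_datastore_dir, deallocate_network):
    power_off_vm(session, instance)               # PowerOffVM_Task
    unregister_vm(session, instance)              # VirtualMachine.UnregisterVM
    try:
        delete_datastore_dir(session, instance)   # FileManager.DeleteDatastoreFile_Task
    except Exception as exc:
        # Mirror the log: deletion failures are warned about, not fatal.
        LOG.warning("Exception while deleting the VM contents from the disk: %s", exc)
    deallocate_network(instance)                  # deallocate_for_instance(), retried by a looping call
    LOG.info("Destroyed the instance %s on the hypervisor", instance)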
[ 1407.468619] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1407.469742] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3883659d-c592-4e94-852d-a3e6990c24c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.533029] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697466, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077847} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.534183] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1407.535349] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18879f2c-2216-4c43-9d29-0f20628da415 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.556296] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] fb33135a-073d-4d80-9833-5b29afae1cc6/fb33135a-073d-4d80-9833-5b29afae1cc6.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1407.556919] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3fed59a-3b73-43f9-af13-aa526cf804e4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.576207] env[63297]: ERROR nova.scheduler.client.report [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [req-5c0da4dd-11e4-46f8-b4f2-d69b4dd277c5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5c0da4dd-11e4-46f8-b4f2-d69b4dd277c5"}]} [ 1407.593020] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1407.593020] env[63297]: value = "task-1697470" [ 1407.593020] env[63297]: _type = "Task" [ 1407.593020] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.600777] env[63297]: WARNING nova.virt.vmwareapi.vmops [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] In vmwareapi:vmops:_destroy_instance, exception while deleting the VM contents from the disk: oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore1] 6ce88b93-aa42-4f34-81fa-6c09c23ace81 [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Traceback (most recent call last): [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1113, in _destroy_instance [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] ds_util.file_delete(self._session, [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] File "/opt/stack/nova/nova/virt/vmwareapi/ds_util.py", line 219, in file_delete [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] session._wait_for_task(file_delete_task) [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] return self.wait_for_task(task_ref) [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] return evt.wait() [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] result = hub.switch() [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] return self.greenlet.switch() [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 
75, in _inner [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] self.f(*self.args, **self.kw) [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] raise exceptions.translate_fault(task_info.error) [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore1] 6ce88b93-aa42-4f34-81fa-6c09c23ace81 [ 1407.600777] env[63297]: ERROR nova.virt.vmwareapi.vmops [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] [ 1407.600777] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1407.600777] env[63297]: INFO nova.compute.manager [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1407.601300] env[63297]: DEBUG oslo.service.loopingcall [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1407.602093] env[63297]: DEBUG nova.scheduler.client.report [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1407.604256] env[63297]: DEBUG nova.compute.manager [-] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1407.604352] env[63297]: DEBUG nova.network.neutron [-] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1407.613434] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697470, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.626021] env[63297]: DEBUG nova.scheduler.client.report [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1407.626021] env[63297]: DEBUG nova.compute.provider_tree [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1407.636747] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1407.637520] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba693715-77e5-4945-9a60-256a8b1dd0a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.652021] env[63297]: DEBUG nova.scheduler.client.report [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1407.652470] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Waiting for the task: (returnval){ [ 1407.652470] env[63297]: value = "task-1697471" [ 1407.652470] env[63297]: _type = "Task" [ 1407.652470] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.663494] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697468, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.671728] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.673111] env[63297]: DEBUG nova.scheduler.client.report [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1407.849030] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "71faf167-dfe3-4792-9841-b5ab4b333884" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.849511] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "71faf167-dfe3-4792-9841-b5ab4b333884" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.853140] env[63297]: DEBUG oslo_concurrency.lockutils [None req-39c7c0ba-bde9-4bd6-aed5-bfbbba6a7a28 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.581s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.899553] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697469, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.993028] env[63297]: INFO nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Took 43.79 seconds to build instance. 
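The 409 from placement above is the expected optimistic-concurrency outcome: every inventory PUT carries the resource provider generation the client last saw, a stale generation is rejected with code placement.concurrent_update, and the report client responds by refreshing inventories, aggregates and traits before retrying, which is exactly what the "Refreshing ..." lines above record. A schematic retry loop (illustrative only; get_provider and put_inventory are assumed HTTP helpers, not Nova's SchedulerReportClient methods):

def update_inventory_with_retry(get_provider, put_inventory, provider_uuid,
                                inventory, max_retries=3):
    # Optimistic concurrency: send the generation we last read; on a 409
    # placement.concurrent_update, re-read the provider and try again.
    for _ in range(max_retries):
        provider = get_provider(provider_uuid)   # assumed GET /resource_providers/{uuid}
        payload = {
            "resource_provider_generation": provider["generation"],
            "inventories": inventory,
        }
        status, body = put_inventory(provider_uuid, payload)  # assumed PUT .../inventories
        if status == 200:
            return body
        if status == 409 and "placement.concurrent_update" in str(body):
            continue  # generation raced with another writer; refresh and retry
        raise RuntimeError(f"Inventory update failed: {status} {body}")
    raise RuntimeError("Gave up after repeated generation conflicts")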
[ 1408.076155] env[63297]: DEBUG nova.compute.manager [req-61cbd79e-4456-4af4-91d2-9ac5606055e6 req-a25b4041-f601-4ff3-a273-0c3b748a0d6d service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Received event network-vif-deleted-8cf1041d-9ff1-4cf4-808d-40d2edaf0e06 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1408.076155] env[63297]: INFO nova.compute.manager [req-61cbd79e-4456-4af4-91d2-9ac5606055e6 req-a25b4041-f601-4ff3-a273-0c3b748a0d6d service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Neutron deleted interface 8cf1041d-9ff1-4cf4-808d-40d2edaf0e06; detaching it from the instance and deleting it from the info cache [ 1408.076155] env[63297]: DEBUG nova.network.neutron [req-61cbd79e-4456-4af4-91d2-9ac5606055e6 req-a25b4041-f601-4ff3-a273-0c3b748a0d6d service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1408.103947] env[63297]: DEBUG nova.compute.manager [req-420361bd-a6a9-4b8f-9876-98526b545713 req-073a5735-96c2-41ec-9588-dddd7e551b07 service nova] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Received event network-vif-deleted-a3fa7a52-2cf8-470b-951c-9f0de053dd1a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1408.104141] env[63297]: INFO nova.compute.manager [req-420361bd-a6a9-4b8f-9876-98526b545713 req-073a5735-96c2-41ec-9588-dddd7e551b07 service nova] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Neutron deleted interface a3fa7a52-2cf8-470b-951c-9f0de053dd1a; detaching it from the instance and deleting it from the info cache [ 1408.104317] env[63297]: DEBUG nova.network.neutron [req-420361bd-a6a9-4b8f-9876-98526b545713 req-073a5735-96c2-41ec-9588-dddd7e551b07 service nova] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1408.111942] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697470, 'name': ReconfigVM_Task, 'duration_secs': 0.470997} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.111942] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Reconfigured VM instance instance-00000038 to attach disk [datastore1] fb33135a-073d-4d80-9833-5b29afae1cc6/fb33135a-073d-4d80-9833-5b29afae1cc6.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1408.112564] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e13af33c-efc8-4365-887c-84c79c60cab0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.121478] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1408.121478] env[63297]: value = "task-1697472" [ 1408.121478] env[63297]: _type = "Task" [ 1408.121478] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.135308] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697472, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.152451] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697468, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.82657} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.155274] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c83c23d9-a8ec-4a87-8a8c-067e18d2615a/c83c23d9-a8ec-4a87-8a8c-067e18d2615a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1408.155495] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1408.155941] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe4def2a-d314-4911-a4d6-055ddf1299e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.166743] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697471, 'name': PowerOffVM_Task, 'duration_secs': 0.459848} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.170270] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1408.171085] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1408.171469] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1408.171469] env[63297]: value = "task-1697473" [ 1408.171469] env[63297]: _type = "Task" [ 1408.171469] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.171861] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-620a7664-13c7-4987-98cb-2ff9ce726d2a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.182500] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697473, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.186542] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Waiting for the task: (returnval){ [ 1408.186542] env[63297]: value = "task-1697474" [ 1408.186542] env[63297]: _type = "Task" [ 1408.186542] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.200501] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1408.200757] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Volume detach. Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1408.201099] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353831', 'volume_id': '950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'name': 'volume-950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a57c0638-e14b-4474-a6b4-7184d7e2a0fe', 'attached_at': '', 'detached_at': '', 'volume_id': '950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'serial': '950dc879-aa21-4a03-88c5-e4e67d4e27c0'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1408.202171] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f993105-b5df-4d85-a01f-a80b0da61c46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.225846] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c45e0b8f-32ae-45fa-9c4d-d71969e66066 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.234980] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fca326-0d7c-44a9-b855-23ddd417411e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.259431] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a956fead-a645-4446-8478-55f20fec8f59 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.275859] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] The volume has not been displaced from its original 
location: [datastore1] volume-950dc879-aa21-4a03-88c5-e4e67d4e27c0/volume-950dc879-aa21-4a03-88c5-e4e67d4e27c0.vmdk. No consolidation needed. {{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1408.281494] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Reconfiguring VM instance instance-00000031 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1408.284540] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5da9b45e-fb24-4022-8abc-cfc99967426d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.304229] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Waiting for the task: (returnval){ [ 1408.304229] env[63297]: value = "task-1697475" [ 1408.304229] env[63297]: _type = "Task" [ 1408.304229] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.316462] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697475, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.370063] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a507783c-e349-452c-b4b3-8c90ff194216 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.376816] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1940d9a2-7cbc-4601-b0f6-9da3171c46c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.412987] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697469, 'name': PowerOffVM_Task, 'duration_secs': 0.644081} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.414315] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1408.414544] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1408.415295] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4ceb23-fcbe-4f71-a43b-9a08304292d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.418354] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e8c87f-ae3d-49f5-aa59-234783742408 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.428838] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d84c037-b0ce-4713-a458-23ea3fda17f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.432461] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1408.432917] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56e2f0e8-967e-4aa7-96ef-e9da5f59242e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.176659] env[63297]: DEBUG nova.network.neutron [-] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.177793] env[63297]: DEBUG nova.network.neutron [-] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.178797] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "c147f97d-7fae-4364-a9c0-04978df2450f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.137s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1409.182913] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.183418] env[63297]: DEBUG nova.compute.provider_tree [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1409.186755] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7f02aef-b4be-4ab1-ac56-9ab9a6894c9d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.188551] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a472309d-3f06-44b9-a85d-e949694a2deb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.200130] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e5a257-b530-4355-96f8-ccc302d39f7f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.203187] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1409.203485] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1409.203721] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleting the datastore file [datastore1] b14e8466-68ab-4705-a439-6db961a149b0 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1409.206336] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99aa010e-24a0-420a-adb4-24dfb29c3eae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.221925] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697473, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070858} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.222533] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697472, 'name': Rename_Task, 'duration_secs': 0.154175} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.239698] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1409.242606] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5e4817-5dbd-4341-808c-3ad80cf38208 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.255531] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f577ceac-cd10-4117-a3d3-f72364c4c5a9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.265604] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1409.266321] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance 'b95b7656-70ac-4eaf-9934-4b4c50e78035' progress to 0 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1409.269847] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697475, 'name': ReconfigVM_Task, 'duration_secs': 0.422119} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.271220] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5897f861-62e4-4fa5-8e54-63e7270191ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.273649] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f83d51a8-b1c0-49ed-9dc3-bcfeba8aa256 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.276798] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Reconfigured VM instance instance-00000031 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1409.284043] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7bf364f-8aba-464b-ad6b-a69bb2651858 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.294017] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1409.294017] env[63297]: value = "task-1697477" [ 1409.294017] env[63297]: _type = "Task" [ 1409.294017] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.332422] env[63297]: DEBUG nova.compute.manager [req-420361bd-a6a9-4b8f-9876-98526b545713 req-073a5735-96c2-41ec-9588-dddd7e551b07 service nova] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Detach interface failed, port_id=a3fa7a52-2cf8-470b-951c-9f0de053dd1a, reason: Instance 6ce88b93-aa42-4f34-81fa-6c09c23ace81 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1409.333066] env[63297]: DEBUG nova.compute.manager [req-61cbd79e-4456-4af4-91d2-9ac5606055e6 req-a25b4041-f601-4ff3-a273-0c3b748a0d6d service nova] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Detach interface failed, port_id=8cf1041d-9ff1-4cf4-808d-40d2edaf0e06, reason: Instance 35c68986-51b5-43ba-a076-aca3c86d68bc could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1409.341998] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] c83c23d9-a8ec-4a87-8a8c-067e18d2615a/c83c23d9-a8ec-4a87-8a8c-067e18d2615a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1409.345491] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d47f729-c840-45f5-b8f6-ec8990a27ff6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.360145] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Waiting for the task: (returnval){ [ 1409.360145] env[63297]: value = "task-1697479" [ 1409.360145] env[63297]: _type = "Task" [ 1409.360145] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.360415] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1409.360415] env[63297]: value = "task-1697478" [ 1409.360415] env[63297]: _type = "Task" [ 1409.360415] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.363867] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697477, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.371772] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1409.371772] env[63297]: value = "task-1697480" [ 1409.371772] env[63297]: _type = "Task" [ 1409.371772] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.381648] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697478, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.382243] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697479, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.390309] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697480, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.687438] env[63297]: INFO nova.compute.manager [-] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Took 2.08 seconds to deallocate network for instance. [ 1409.687808] env[63297]: INFO nova.compute.manager [-] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Took 2.34 seconds to deallocate network for instance. [ 1409.706953] env[63297]: DEBUG nova.objects.instance [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lazy-loading 'flavor' on Instance uuid 5124f7fb-1293-4964-98c4-426ecfce7d10 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1409.709084] env[63297]: DEBUG nova.scheduler.client.report [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1409.712863] env[63297]: DEBUG nova.compute.manager [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1409.772245] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1409.772572] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93557322-278b-453a-98d9-8f774ccb9c9f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.781106] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1409.781106] env[63297]: value = "task-1697481" [ 1409.781106] env[63297]: _type = "Task" [ 1409.781106] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.791221] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697481, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.807359] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697477, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.357386} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.807634] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1409.808084] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1409.808084] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1409.876743] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697479, 'name': ReconfigVM_Task, 'duration_secs': 0.204423} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.882938] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353831', 'volume_id': '950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'name': 'volume-950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a57c0638-e14b-4474-a6b4-7184d7e2a0fe', 'attached_at': '', 'detached_at': '', 'volume_id': '950dc879-aa21-4a03-88c5-e4e67d4e27c0', 'serial': '950dc879-aa21-4a03-88c5-e4e67d4e27c0'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1409.883259] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1409.883925] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697478, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.884664] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54047e02-b59b-470c-b583-41aec1c0162a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.893042] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697480, 'name': ReconfigVM_Task, 'duration_secs': 0.307095} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.895294] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Reconfigured VM instance instance-0000003a to attach disk [datastore1] c83c23d9-a8ec-4a87-8a8c-067e18d2615a/c83c23d9-a8ec-4a87-8a8c-067e18d2615a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1409.895971] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1409.896209] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-079a4b55-3552-44c0-b217-309e552069f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.897828] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc9988d0-95e9-4baa-9641-391eb935d9b9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.904701] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1409.904701] env[63297]: value = "task-1697482" [ 1409.904701] env[63297]: _type = "Task" [ 1409.904701] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.913748] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697482, 'name': Rename_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.981086] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1409.981334] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1409.981512] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Deleting the datastore file [datastore1] a57c0638-e14b-4474-a6b4-7184d7e2a0fe {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1409.981864] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-10c682c2-2cb0-47fe-8b5b-a1022a3f1801 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.991491] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Waiting for the task: (returnval){ [ 1409.991491] env[63297]: value = "task-1697484" [ 1409.991491] env[63297]: _type = "Task" [ 1409.991491] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.007819] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697484, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.197455] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.198753] env[63297]: DEBUG oslo_concurrency.lockutils [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.218445] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.939s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.219332] env[63297]: DEBUG nova.compute.manager [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1410.224512] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.224675] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquired lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.224848] env[63297]: DEBUG nova.network.neutron [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1410.225230] env[63297]: DEBUG nova.objects.instance [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lazy-loading 'info_cache' on Instance uuid 5124f7fb-1293-4964-98c4-426ecfce7d10 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1410.226148] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.242s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.227574] env[63297]: INFO nova.compute.claims [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1410.246141] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.291738] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697481, 'name': PowerOffVM_Task, 'duration_secs': 0.258508} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.291990] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1410.292184] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance 'b95b7656-70ac-4eaf-9934-4b4c50e78035' progress to 17 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1410.374506] env[63297]: DEBUG oslo_vmware.api [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697478, 'name': PowerOnVM_Task, 'duration_secs': 0.732925} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.374784] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1410.375035] env[63297]: DEBUG nova.compute.manager [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1410.375810] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a78409-7d29-419e-b481-d4ea6a40f9f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.415471] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697482, 'name': Rename_Task, 'duration_secs': 0.191541} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.416405] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1410.416668] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-920e6fef-d4fa-4c92-8309-81483f3efec7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.424530] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1410.424530] env[63297]: value = "task-1697485" [ 1410.424530] env[63297]: _type = "Task" [ 1410.424530] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.432879] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697485, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.503655] env[63297]: DEBUG oslo_vmware.api [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Task: {'id': task-1697484, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174242} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.503953] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1410.504173] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1410.504356] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1410.563452] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Volume detach. Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1410.564646] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2bd2b776-50fa-420c-a0ea-a0ad77813714 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.576501] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe84fbb2-9302-436c-a017-ea60e5ff96e6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.614566] env[63297]: ERROR nova.compute.manager [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Failed to detach volume 950dc879-aa21-4a03-88c5-e4e67d4e27c0 from /dev/sda: nova.exception.InstanceNotFound: Instance a57c0638-e14b-4474-a6b4-7184d7e2a0fe could not be found. 
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Traceback (most recent call last):
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] self.driver.rebuild(**kwargs)
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] raise NotImplementedError()
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] NotImplementedError
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe]
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] During handling of the above exception, another exception occurred:
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe]
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Traceback (most recent call last):
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] self.driver.detach_volume(context, old_connection_info,
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] return self._volumeops.detach_volume(connection_info, instance)
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] self._detach_volume_vmdk(connection_info, instance)
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] vm_ref = vm_util.get_vm_ref(self._session, instance)
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] stable_ref.fetch_moref(session)
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] raise exception.InstanceNotFound(instance_id=self._uuid)
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] nova.exception.InstanceNotFound: Instance a57c0638-e14b-4474-a6b4-7184d7e2a0fe could not be found.
[ 1410.614566] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe]
[ 1410.726468] env[63297]: DEBUG nova.compute.utils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1410.727902] env[63297]: DEBUG nova.compute.manager [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}}
[ 1410.728132] env[63297]: DEBUG nova.network.neutron [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 1410.734139] env[63297]: DEBUG nova.objects.base [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Object Instance<5124f7fb-1293-4964-98c4-426ecfce7d10> lazy-loaded attributes: flavor,info_cache {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}}
[ 1410.745318] env[63297]: DEBUG nova.compute.utils [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Build of instance a57c0638-e14b-4474-a6b4-7184d7e2a0fe aborted: Failed to rebuild volume backed instance. {{(pid=63297) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1410.747601] env[63297]: ERROR nova.compute.manager [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance a57c0638-e14b-4474-a6b4-7184d7e2a0fe aborted: Failed to rebuild volume backed instance.
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Traceback (most recent call last):
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] self.driver.rebuild(**kwargs)
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] raise NotImplementedError()
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] NotImplementedError
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe]
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] During handling of the above exception, another exception occurred:
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe]
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Traceback (most recent call last):
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/compute/manager.py", line 3601, in _rebuild_volume_backed_instance
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] self._detach_root_volume(context, instance, root_bdm)
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/compute/manager.py", line 3580, in _detach_root_volume
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] with excutils.save_and_reraise_exception():
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] self.force_reraise()
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] raise self.value
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] self.driver.detach_volume(context, old_connection_info,
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] return self._volumeops.detach_volume(connection_info, instance)
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] self._detach_volume_vmdk(connection_info, instance)
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] vm_ref = vm_util.get_vm_ref(self._session, instance)
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] stable_ref.fetch_moref(session)
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] raise exception.InstanceNotFound(instance_id=self._uuid)
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] nova.exception.InstanceNotFound: Instance a57c0638-e14b-4474-a6b4-7184d7e2a0fe could not be found.
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe]
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] During handling of the above exception, another exception occurred:
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe]
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Traceback (most recent call last):
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/compute/manager.py", line 10866, in _error_out_instance_on_exception
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] yield
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/compute/manager.py", line 3869, in rebuild_instance
[ 1410.747601] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] self._do_rebuild_instance_with_claim(
[ 1410.748461] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/compute/manager.py", line 3955, in _do_rebuild_instance_with_claim
[ 1410.748461] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] self._do_rebuild_instance(
[ 1410.748461] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/compute/manager.py", line 4147, in _do_rebuild_instance
[ 1410.748461] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] self._rebuild_default_impl(**kwargs)
[ 1410.748461] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/compute/manager.py", line 3724, in _rebuild_default_impl
[ 1410.748461] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe]
self._rebuild_volume_backed_instance( [ 1410.748461] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] File "/opt/stack/nova/nova/compute/manager.py", line 3616, in _rebuild_volume_backed_instance [ 1410.748461] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] raise exception.BuildAbortException( [ 1410.748461] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] nova.exception.BuildAbortException: Build of instance a57c0638-e14b-4474-a6b4-7184d7e2a0fe aborted: Failed to rebuild volume backed instance. [ 1410.748461] env[63297]: ERROR nova.compute.manager [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] [ 1410.782601] env[63297]: DEBUG nova.policy [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be5e07baf148496880261386dff8df76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e3dcd98ebe94a75a94322b03feba3b4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1410.798437] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1410.798708] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1410.798869] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1410.799098] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1410.799404] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1410.799579] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1410.799801] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1410.799968] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1410.800155] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1410.800324] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1410.800505] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1410.806572] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ece391a-24bc-4500-9d11-d0c210325397 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.827089] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1410.827089] env[63297]: value = "task-1697486" [ 1410.827089] env[63297]: _type = "Task" [ 1410.827089] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.836836] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697486, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.848447] env[63297]: DEBUG nova.virt.hardware [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1410.848447] env[63297]: DEBUG nova.virt.hardware [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1410.848630] env[63297]: DEBUG nova.virt.hardware [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1410.848697] env[63297]: DEBUG nova.virt.hardware [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1410.848846] env[63297]: DEBUG nova.virt.hardware [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1410.849075] env[63297]: DEBUG nova.virt.hardware [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1410.849377] env[63297]: DEBUG nova.virt.hardware [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1410.849553] env[63297]: DEBUG nova.virt.hardware [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1410.849723] env[63297]: DEBUG nova.virt.hardware [None 
req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1410.849925] env[63297]: DEBUG nova.virt.hardware [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1410.850160] env[63297]: DEBUG nova.virt.hardware [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1410.851060] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb4b3e8-faaa-4a07-b7d4-2ce3df4c9379 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.861039] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ab403d-9088-4c3f-9d59-adea8eea1da3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.876490] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:73:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee4b2432-c393-4e50-ae0e-b5e12bad37db', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '603a207a-5c56-4835-a1be-961da01f6f07', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1410.884188] env[63297]: DEBUG oslo.service.loopingcall [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1410.884368] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1410.884577] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c861f9be-9663-498e-9416-2a7d24f8e520 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.906957] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.909231] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1410.909231] env[63297]: value = "task-1697487" [ 1410.909231] env[63297]: _type = "Task" [ 1410.909231] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.919686] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697487, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.935511] env[63297]: DEBUG oslo_vmware.api [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697485, 'name': PowerOnVM_Task, 'duration_secs': 0.467421} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.935778] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1410.935991] env[63297]: INFO nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Took 8.57 seconds to spawn the instance on the hypervisor. 
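Note on the BuildAbortException traceback above (req-132b73be-170c-464d-838c-0a6cee18faa4, instance a57c0638-e14b-4474-a6b4-7184d7e2a0fe): the driver-level failure (InstanceNotFound raised while detaching the root volume) is preserved by oslo_utils' save_and_reraise_exception and then wrapped by the compute manager into BuildAbortException, which is what finally sets the instance's vm_state to ERROR. The following is a minimal, self-contained sketch of that error-handling pattern, not Nova's actual code; the exception classes here are stand-ins and only the oslo.utils package is assumed to be available.

    from oslo_utils import excutils


    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""


    class BuildAbortException(Exception):
        """Stand-in for nova.exception.BuildAbortException."""


    def detach_root_volume():
        try:
            # Stands in for the failing driver.detach_volume() call: in the log,
            # the VM moref for the instance can no longer be found in vCenter.
            raise InstanceNotFound("Instance a57c0638-... could not be found.")
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup/rollback would run here; on exiting the context manager
                # the original InstanceNotFound is re-raised unchanged.
                pass


    def rebuild_volume_backed_instance():
        try:
            detach_root_volume()
        except Exception:
            # Mirrors the compute manager wrapping the low-level failure into a
            # higher-level error; __context__ still carries InstanceNotFound.
            raise BuildAbortException(
                "Build of instance aborted: Failed to rebuild volume backed instance.")


    try:
        rebuild_volume_backed_instance()
    except BuildAbortException as exc:
        print(exc)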
[ 1410.936408] env[63297]: DEBUG nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1410.937225] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f286a1-add6-457a-8134-8d2a5f02156e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.184617] env[63297]: DEBUG nova.network.neutron [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Successfully created port: c8c6ab49-f882-4349-bddd-cfb1a972afc0 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1411.231451] env[63297]: DEBUG nova.compute.manager [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1411.341680] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697486, 'name': ReconfigVM_Task, 'duration_secs': 0.208978} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.344249] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance 'b95b7656-70ac-4eaf-9934-4b4c50e78035' progress to 33 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1411.428710] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697487, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.463919] env[63297]: INFO nova.compute.manager [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Took 43.67 seconds to build instance. 
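Note on the oslo_concurrency.lockutils messages that recur through this section: the 'Acquiring lock ... by ...' / 'acquired ... waited' / '"released" ... held' lines come from the synchronized-style wrapper (the inner() frames in lockutils.py) that Nova places around critical sections such as the resource tracker's "compute_resources" lock, while the 'Acquiring lock "[datastore1] devstack-image-cache_base/..."' lines use the lock() context manager directly. Below is a minimal sketch of both forms; the function names are illustrative stand-ins and only the oslo.concurrency package is assumed to be available.

    from oslo_concurrency import lockutils


    # Decorator form: serializes callers on a named in-process lock; this is the
    # mechanism behind the "acquired ... waited" / "released ... held" timings
    # logged by the inner() wrapper seen in this section.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Critical section (illustrative stand-in for resource-tracker bookkeeping).
        pass


    # Context-manager form, comparable to the datastore image-cache locks above.
    def locked_build(instance_uuid):
        with lockutils.lock(instance_uuid):
            # Work performed while holding the lock; acquisition and release are
            # logged by lockutils at debug level.
            pass


    update_usage()
    locked_build('c83c23d9-a8ec-4a87-8a8c-067e18d2615a')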
[ 1411.670545] env[63297]: DEBUG nova.network.neutron [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Updating instance_info_cache with network_info: [{"id": "1b123801-2747-40a9-84bc-ae5dc9595556", "address": "fa:16:3e:42:cf:1d", "network": {"id": "437d4b1d-796c-43d8-8258-df0e6b4e36d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-940883115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731a719bb2a44a53985d10e02f9397cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b123801-27", "ovs_interfaceid": "1b123801-2747-40a9-84bc-ae5dc9595556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.782867] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16abacd1-3bde-46a1-b7ce-e98637c8a573 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.792362] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6f1eb7-eea1-419f-ac7a-6cee65bf971e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.831605] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11de60c7-d3ca-4be0-9599-99d2f7a3f1b9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.840536] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23ac456-537e-40a2-9f44-e521352add48 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.857077] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:24:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='5cd2709a-9ccb-43da-a92d-61f75514f90c',id=29,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-114049318',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1411.857201] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1411.857367] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1411.857573] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1411.857724] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1411.857874] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1411.858088] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1411.858249] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1411.858412] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1411.858570] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1411.858740] env[63297]: DEBUG nova.virt.hardware [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Sorted desired topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1411.864214] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Reconfiguring VM instance instance-00000030 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1411.864859] env[63297]: DEBUG nova.compute.provider_tree [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.866501] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88d53c03-9285-4a6a-81b4-30073027da38 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.883602] env[63297]: DEBUG nova.scheduler.client.report [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1411.892084] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1411.892084] env[63297]: value = "task-1697488" [ 1411.892084] env[63297]: _type = "Task" [ 1411.892084] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.901781] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697488, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.920675] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697487, 'name': CreateVM_Task, 'duration_secs': 0.684568} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.920909] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1411.921741] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.922385] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.922385] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1411.922494] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f9164ec-4089-4260-8ae4-6916c2f254e7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.932018] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1411.932018] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526a704a-afc1-9e16-4821-161494043366" [ 1411.932018] env[63297]: _type = "Task" [ 1411.932018] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.937796] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526a704a-afc1-9e16-4821-161494043366, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.970959] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d9175503-a670-464b-94be-8f0dd5a95a87 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "c83c23d9-a8ec-4a87-8a8c-067e18d2615a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.896s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.174145] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Releasing lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.241792] env[63297]: DEBUG nova.compute.manager [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1412.273916] env[63297]: DEBUG nova.virt.hardware [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1412.274226] env[63297]: DEBUG nova.virt.hardware [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1412.274417] env[63297]: DEBUG nova.virt.hardware [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1412.274645] env[63297]: DEBUG nova.virt.hardware [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1412.274839] env[63297]: DEBUG nova.virt.hardware [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image pref 
0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1412.275068] env[63297]: DEBUG nova.virt.hardware [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1412.275422] env[63297]: DEBUG nova.virt.hardware [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1412.275629] env[63297]: DEBUG nova.virt.hardware [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1412.276138] env[63297]: DEBUG nova.virt.hardware [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1412.276408] env[63297]: DEBUG nova.virt.hardware [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1412.276628] env[63297]: DEBUG nova.virt.hardware [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1412.277614] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85c7eb5-b93c-4823-a6bf-b76d92bacf7f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.287062] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc4887a-36f7-4d55-9f0f-efa28cc9bc2d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.385645] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.159s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.386827] env[63297]: DEBUG nova.compute.manager [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1412.389527] env[63297]: DEBUG oslo_concurrency.lockutils [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.840s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.389753] env[63297]: DEBUG nova.objects.instance [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lazy-loading 'resources' on Instance uuid c4e96403-895c-479d-bfb2-274a87446bf9 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1412.405362] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697488, 'name': ReconfigVM_Task, 'duration_secs': 0.415854} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.405587] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Reconfigured VM instance instance-00000030 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1412.406381] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb1ced0-58d3-4951-9613-d4baff4a2b1e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.431162] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] b95b7656-70ac-4eaf-9934-4b4c50e78035/b95b7656-70ac-4eaf-9934-4b4c50e78035.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1412.432381] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d47c04ea-b2d5-4b7b-982a-a1ef07da6ace {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.456327] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526a704a-afc1-9e16-4821-161494043366, 'name': SearchDatastore_Task, 'duration_secs': 0.010312} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.457989] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.458257] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1412.458490] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.458639] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.458994] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1412.459450] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1412.459450] env[63297]: value = "task-1697489" [ 1412.459450] env[63297]: _type = "Task" [ 1412.459450] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.459762] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-538f812f-4cd5-40de-a03e-5484bf4dbee7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.473158] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697489, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.473512] env[63297]: DEBUG nova.compute.manager [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1412.476450] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1412.478387] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1412.478387] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a7b8d86-d1b1-4110-8d19-89866e171762 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.485424] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1412.485424] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e54c66-8f9d-809b-cbc3-83fb06af676e" [ 1412.485424] env[63297]: _type = "Task" [ 1412.485424] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.497405] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e54c66-8f9d-809b-cbc3-83fb06af676e, 'name': SearchDatastore_Task} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.498364] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7d0cdcb-8d5f-445a-afda-26a1af2195af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.504790] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1412.504790] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]525cb5e7-effe-c725-4ff1-069e7fbb1187" [ 1412.504790] env[63297]: _type = "Task" [ 1412.504790] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.514635] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525cb5e7-effe-c725-4ff1-069e7fbb1187, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.584976] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "fb33135a-073d-4d80-9833-5b29afae1cc6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.585703] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "fb33135a-073d-4d80-9833-5b29afae1cc6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.585958] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "fb33135a-073d-4d80-9833-5b29afae1cc6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.586202] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "fb33135a-073d-4d80-9833-5b29afae1cc6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.586408] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "fb33135a-073d-4d80-9833-5b29afae1cc6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.591797] env[63297]: INFO nova.compute.manager [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Terminating instance [ 1412.593505] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "refresh_cache-fb33135a-073d-4d80-9833-5b29afae1cc6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.593679] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquired lock "refresh_cache-fb33135a-073d-4d80-9833-5b29afae1cc6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.593873] env[63297]: DEBUG nova.network.neutron [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 
tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1412.678615] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1412.678922] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f14129d1-81cd-4f12-8b44-25e386cc445a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.686872] env[63297]: DEBUG oslo_vmware.api [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1412.686872] env[63297]: value = "task-1697490" [ 1412.686872] env[63297]: _type = "Task" [ 1412.686872] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.695555] env[63297]: DEBUG oslo_vmware.api [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697490, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.768796] env[63297]: DEBUG oslo_concurrency.lockutils [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.873417] env[63297]: DEBUG oslo_concurrency.lockutils [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "c147f97d-7fae-4364-a9c0-04978df2450f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.873716] env[63297]: DEBUG oslo_concurrency.lockutils [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "c147f97d-7fae-4364-a9c0-04978df2450f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.874084] env[63297]: DEBUG oslo_concurrency.lockutils [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "c147f97d-7fae-4364-a9c0-04978df2450f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.874362] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "c147f97d-7fae-4364-a9c0-04978df2450f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.874584] env[63297]: DEBUG oslo_concurrency.lockutils [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "c147f97d-7fae-4364-a9c0-04978df2450f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.876916] env[63297]: INFO nova.compute.manager [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Terminating instance [ 1412.878821] env[63297]: DEBUG nova.compute.manager [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1412.879064] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1412.879896] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc748d0-1ff3-421c-b68a-6d534ff73270 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.890208] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1412.890500] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ad227d4-dda2-4344-838c-3ec461128d87 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.892979] env[63297]: DEBUG nova.compute.utils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1412.896672] env[63297]: DEBUG nova.compute.manager [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1412.896878] env[63297]: DEBUG nova.network.neutron [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1412.905332] env[63297]: DEBUG oslo_vmware.api [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1412.905332] env[63297]: value = "task-1697491" [ 1412.905332] env[63297]: _type = "Task" [ 1412.905332] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.917346] env[63297]: DEBUG oslo_vmware.api [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697491, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.964489] env[63297]: DEBUG nova.policy [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c6fae492b874c658a88928f1d16f384', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2dc4f669c9b04dd5a958165c6149138d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1412.982045] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697489, 'name': ReconfigVM_Task, 'duration_secs': 0.308749} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.993215] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Reconfigured VM instance instance-00000030 to attach disk [datastore1] b95b7656-70ac-4eaf-9934-4b4c50e78035/b95b7656-70ac-4eaf-9934-4b4c50e78035.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1412.993215] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance 'b95b7656-70ac-4eaf-9934-4b4c50e78035' progress to 50 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1413.016893] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.023354] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525cb5e7-effe-c725-4ff1-069e7fbb1187, 'name': SearchDatastore_Task, 'duration_secs': 0.022885} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.023619] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.023891] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b14e8466-68ab-4705-a439-6db961a149b0/b14e8466-68ab-4705-a439-6db961a149b0.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1413.024635] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bcd28e38-ae4d-4f18-ab92-8e8de9daa52c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.033733] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1413.033733] env[63297]: value = "task-1697492" [ 1413.033733] env[63297]: _type = "Task" [ 1413.033733] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.048211] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697492, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.048211] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "c83c23d9-a8ec-4a87-8a8c-067e18d2615a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.048211] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "c83c23d9-a8ec-4a87-8a8c-067e18d2615a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.048211] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "c83c23d9-a8ec-4a87-8a8c-067e18d2615a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.048405] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "c83c23d9-a8ec-4a87-8a8c-067e18d2615a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.052249] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "c83c23d9-a8ec-4a87-8a8c-067e18d2615a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.060166] env[63297]: DEBUG nova.compute.manager [req-6aa5026d-3064-4b1a-bbf7-9678b3f8116c req-6597197c-9af9-44d9-abcc-f80466d7b4be service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Received event network-vif-plugged-c8c6ab49-f882-4349-bddd-cfb1a972afc0 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1413.060401] env[63297]: DEBUG oslo_concurrency.lockutils [req-6aa5026d-3064-4b1a-bbf7-9678b3f8116c req-6597197c-9af9-44d9-abcc-f80466d7b4be service nova] Acquiring lock "b1ed5d76-d358-49d3-a854-8f968bc987ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.060620] env[63297]: 
DEBUG oslo_concurrency.lockutils [req-6aa5026d-3064-4b1a-bbf7-9678b3f8116c req-6597197c-9af9-44d9-abcc-f80466d7b4be service nova] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.060801] env[63297]: DEBUG oslo_concurrency.lockutils [req-6aa5026d-3064-4b1a-bbf7-9678b3f8116c req-6597197c-9af9-44d9-abcc-f80466d7b4be service nova] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.060983] env[63297]: DEBUG nova.compute.manager [req-6aa5026d-3064-4b1a-bbf7-9678b3f8116c req-6597197c-9af9-44d9-abcc-f80466d7b4be service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] No waiting events found dispatching network-vif-plugged-c8c6ab49-f882-4349-bddd-cfb1a972afc0 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1413.061195] env[63297]: WARNING nova.compute.manager [req-6aa5026d-3064-4b1a-bbf7-9678b3f8116c req-6597197c-9af9-44d9-abcc-f80466d7b4be service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Received unexpected event network-vif-plugged-c8c6ab49-f882-4349-bddd-cfb1a972afc0 for instance with vm_state building and task_state spawning. [ 1413.061808] env[63297]: INFO nova.compute.manager [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Terminating instance [ 1413.064092] env[63297]: DEBUG nova.compute.manager [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1413.064289] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1413.065171] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c61104-c89a-47b1-9974-77943bdf6040 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.078476] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1413.078700] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd3ed262-09a3-4b13-a1cb-215fb38e7614 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.088055] env[63297]: DEBUG oslo_vmware.api [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1413.088055] env[63297]: value = "task-1697493" [ 1413.088055] env[63297]: _type = "Task" [ 1413.088055] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.100445] env[63297]: DEBUG oslo_vmware.api [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697493, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.129564] env[63297]: DEBUG nova.network.neutron [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1413.175781] env[63297]: DEBUG nova.network.neutron [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Successfully updated port: c8c6ab49-f882-4349-bddd-cfb1a972afc0 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1413.200203] env[63297]: DEBUG oslo_vmware.api [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697490, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.233043] env[63297]: DEBUG nova.network.neutron [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.358031] env[63297]: DEBUG nova.network.neutron [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Successfully created port: d6cfb890-e1a0-457e-801f-615234386b8f {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1413.397139] env[63297]: DEBUG nova.compute.manager [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1413.419315] env[63297]: DEBUG oslo_vmware.api [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697491, 'name': PowerOffVM_Task, 'duration_secs': 0.245794} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.419833] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1413.419833] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1413.420141] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43d5d655-1bf7-4411-a8a3-410691b767e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.500726] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd422653-7312-45c3-aa08-6da5f4e94f5f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.526785] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a1fde9-2236-4c73-a126-67c4687bdfeb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.549870] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance 'b95b7656-70ac-4eaf-9934-4b4c50e78035' progress to 67 {{(pid=63297) _update_instance_progress 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1413.561638] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1413.561638] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1413.561638] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Deleting the datastore file [datastore1] c147f97d-7fae-4364-a9c0-04978df2450f {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1413.562165] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6bf29288-c1fb-487d-93bd-dc8c9f609616 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.571360] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697492, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.573525] env[63297]: DEBUG oslo_vmware.api [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1413.573525] env[63297]: value = "task-1697495" [ 1413.573525] env[63297]: _type = "Task" [ 1413.573525] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.586279] env[63297]: DEBUG oslo_vmware.api [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697495, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.599039] env[63297]: DEBUG oslo_vmware.api [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697493, 'name': PowerOffVM_Task, 'duration_secs': 0.196863} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.601446] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1413.601662] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1413.602448] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7da2f87-a086-4d3c-bd86-fde7cd3491c8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.608553] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-034ade01-9d18-4cdd-9f8a-7797c7e8fb22 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.616523] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb81a38-8ffd-4d81-a550-ab33f8cb1fd0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.650391] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b5a57f-23b9-40f0-a42f-cea150e6a1b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.660913] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d88f6b8-d62c-4cff-9262-62272e534fba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.678439] env[63297]: DEBUG nova.compute.provider_tree [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.681246] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.681377] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.681548] env[63297]: DEBUG nova.network.neutron [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 
b1ed5d76-d358-49d3-a854-8f968bc987ad] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1413.702427] env[63297]: DEBUG oslo_vmware.api [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697490, 'name': PowerOnVM_Task, 'duration_secs': 0.771444} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.703671] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1413.704215] env[63297]: DEBUG nova.compute.manager [None req-42ce4f4f-6fca-483d-ab7c-198f5296ef12 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1413.704531] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1413.704731] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1413.704955] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Deleting the datastore file [datastore1] c83c23d9-a8ec-4a87-8a8c-067e18d2615a {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1413.705755] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43caffb-5a78-4a78-89ca-6e42b325aa6c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.708452] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c38e5e85-acda-4b6d-8212-8d3043e96a03 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.718337] env[63297]: DEBUG oslo_vmware.api [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1413.718337] env[63297]: value = "task-1697497" [ 1413.718337] env[63297]: _type = "Task" [ 1413.718337] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.728368] env[63297]: DEBUG oslo_vmware.api [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697497, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.736355] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Releasing lock "refresh_cache-fb33135a-073d-4d80-9833-5b29afae1cc6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.736746] env[63297]: DEBUG nova.compute.manager [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1413.736930] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1413.737844] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429de7de-b1e2-4c1f-bea8-118f40549beb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.746277] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1413.746891] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ccfdbbdd-b4bf-4c50-bcff-61a0f9b57d47 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.754683] env[63297]: DEBUG oslo_vmware.api [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1413.754683] env[63297]: value = "task-1697498" [ 1413.754683] env[63297]: _type = "Task" [ 1413.754683] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.764216] env[63297]: DEBUG oslo_vmware.api [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697498, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.059841] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697492, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575652} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.060115] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b14e8466-68ab-4705-a439-6db961a149b0/b14e8466-68ab-4705-a439-6db961a149b0.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1414.060329] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1414.060582] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4eb97ef2-71ed-45da-b4ad-f645e41c0b88 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.072065] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1414.072065] env[63297]: value = "task-1697499" [ 1414.072065] env[63297]: _type = "Task" [ 1414.072065] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.085100] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697499, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.088731] env[63297]: DEBUG oslo_vmware.api [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697495, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.325283} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.088731] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1414.088731] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1414.088731] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1414.088731] env[63297]: INFO nova.compute.manager [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1414.088731] env[63297]: DEBUG oslo.service.loopingcall [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1414.088985] env[63297]: DEBUG nova.compute.manager [-] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1414.088985] env[63297]: DEBUG nova.network.neutron [-] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1414.111502] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Acquiring lock "a57c0638-e14b-4474-a6b4-7184d7e2a0fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.112207] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lock "a57c0638-e14b-4474-a6b4-7184d7e2a0fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.112207] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Acquiring lock "a57c0638-e14b-4474-a6b4-7184d7e2a0fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.112402] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lock "a57c0638-e14b-4474-a6b4-7184d7e2a0fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.112436] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lock "a57c0638-e14b-4474-a6b4-7184d7e2a0fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.114577] env[63297]: INFO nova.compute.manager [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Terminating instance [ 1414.116663] env[63297]: DEBUG nova.compute.manager [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1414.116964] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4332df19-f5b5-4527-869b-aedcb89d0f5b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.126852] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd48417-aadc-4a7d-9a14-a795db882a25 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.166019] env[63297]: WARNING nova.virt.vmwareapi.driver [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance a57c0638-e14b-4474-a6b4-7184d7e2a0fe could not be found. [ 1414.166019] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1414.166019] env[63297]: DEBUG nova.network.neutron [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Port d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1414.166019] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cce7acc1-a01f-43e9-a826-a4f2ef7430eb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.175766] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484fd9e4-d4c4-4aac-bc91-5b43afbc0d4b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.188414] env[63297]: DEBUG nova.scheduler.client.report [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1414.217856] env[63297]: WARNING nova.virt.vmwareapi.vmops [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a57c0638-e14b-4474-a6b4-7184d7e2a0fe could not be found. 
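
The inventory record logged just above for provider 88960333-a089-4255-ad72-5c02d57b2b35 is what the scheduler report client compares against Placement before deciding nothing changed. The schedulable capacity Placement derives from such a record is, per resource class, (total - reserved) * allocation_ratio. A minimal sketch using the figures from that entry; the helper name is illustrative only and not part of Nova or Placement:

    # Hedged sketch: derive schedulable capacity from the inventory record
    # reported by nova.scheduler.client.report above. The formula
    # (total - reserved) * allocation_ratio is the usual Placement capacity
    # calculation; usable_capacity() is an illustrative helper, not a Nova API.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def usable_capacity(inv):
        # One capacity figure per resource class.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(usable_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
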
[ 1414.218129] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1414.218318] env[63297]: INFO nova.compute.manager [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Took 0.10 seconds to destroy the instance on the hypervisor. [ 1414.218562] env[63297]: DEBUG oslo.service.loopingcall [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1414.218800] env[63297]: DEBUG nova.compute.manager [-] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1414.218912] env[63297]: DEBUG nova.network.neutron [-] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1414.233589] env[63297]: DEBUG oslo_vmware.api [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697497, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.261779] env[63297]: DEBUG nova.network.neutron [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1414.269606] env[63297]: DEBUG oslo_vmware.api [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697498, 'name': PowerOffVM_Task, 'duration_secs': 0.116324} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.269864] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1414.270048] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1414.270300] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4010f1e3-5005-4a40-9f61-d3459891812f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.300259] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1414.300526] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1414.300719] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Deleting the datastore file [datastore1] fb33135a-073d-4d80-9833-5b29afae1cc6 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1414.300987] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00f8a738-5202-4098-8b39-fbddee7d34a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.308434] env[63297]: DEBUG oslo_vmware.api [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1414.308434] env[63297]: value = "task-1697501" [ 1414.308434] env[63297]: _type = "Task" [ 1414.308434] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.320217] env[63297]: DEBUG oslo_vmware.api [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.407122] env[63297]: DEBUG nova.compute.manager [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1414.436122] env[63297]: DEBUG nova.virt.hardware [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1414.436405] env[63297]: DEBUG nova.virt.hardware [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1414.436569] env[63297]: DEBUG nova.virt.hardware [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1414.436750] env[63297]: DEBUG nova.virt.hardware [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1414.436973] env[63297]: DEBUG nova.virt.hardware [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1414.437440] env[63297]: DEBUG nova.virt.hardware [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1414.437660] env[63297]: DEBUG nova.virt.hardware [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1414.437826] env[63297]: DEBUG nova.virt.hardware [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1414.438012] env[63297]: DEBUG 
nova.virt.hardware [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1414.438249] env[63297]: DEBUG nova.virt.hardware [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1414.438368] env[63297]: DEBUG nova.virt.hardware [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1414.439413] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba0e427-bf37-4507-8c08-c733b9ac6f51 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.448999] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c2d7a0-06d5-4bbc-b332-b143a3c6c45f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.519135] env[63297]: DEBUG nova.network.neutron [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance_info_cache with network_info: [{"id": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "address": "fa:16:3e:23:ee:4c", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c6ab49-f8", "ovs_interfaceid": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.586130] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697499, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.289435} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.586130] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1414.587258] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8b3193-ac2b-4900-b873-2d6c27830fb6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.614207] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] b14e8466-68ab-4705-a439-6db961a149b0/b14e8466-68ab-4705-a439-6db961a149b0.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1414.615061] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4c11b54-e77c-4711-9aa7-c4e57e1479fa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.638739] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1414.638739] env[63297]: value = "task-1697502" [ 1414.638739] env[63297]: _type = "Task" [ 1414.638739] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.648280] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697502, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.695408] env[63297]: DEBUG oslo_concurrency.lockutils [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.306s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.698108] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.958s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.698434] env[63297]: DEBUG nova.objects.instance [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Lazy-loading 'resources' on Instance uuid ef57101e-1d8a-4ad5-ad68-cad2dbea33d1 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1414.728143] env[63297]: INFO nova.scheduler.client.report [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted allocations for instance c4e96403-895c-479d-bfb2-274a87446bf9 [ 1414.736889] env[63297]: DEBUG oslo_vmware.api [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697497, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.728245} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.737143] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1414.737329] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1414.737923] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1414.737923] env[63297]: INFO nova.compute.manager [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Took 1.67 seconds to destroy the instance on the hypervisor. 
[ 1414.737923] env[63297]: DEBUG oslo.service.loopingcall [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1414.738310] env[63297]: DEBUG nova.compute.manager [-] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1414.738407] env[63297]: DEBUG nova.network.neutron [-] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1414.823269] env[63297]: DEBUG oslo_vmware.api [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697501, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392698} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.825603] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1414.825824] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1414.826899] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1414.827312] env[63297]: INFO nova.compute.manager [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1414.827521] env[63297]: DEBUG oslo.service.loopingcall [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1414.828110] env[63297]: DEBUG nova.compute.manager [-] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1414.828110] env[63297]: DEBUG nova.network.neutron [-] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1414.849971] env[63297]: DEBUG nova.network.neutron [-] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1414.874817] env[63297]: DEBUG nova.compute.manager [req-a686afa4-f7ae-4244-951f-5daa97c95430 req-3adbef53-eaf5-420a-a7f7-2dca24b3c23a service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Received event network-vif-deleted-1354943e-9dda-4b43-9508-a9535e5627e5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1414.874817] env[63297]: INFO nova.compute.manager [req-a686afa4-f7ae-4244-951f-5daa97c95430 req-3adbef53-eaf5-420a-a7f7-2dca24b3c23a service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Neutron deleted interface 1354943e-9dda-4b43-9508-a9535e5627e5; detaching it from the instance and deleting it from the info cache [ 1414.874817] env[63297]: DEBUG nova.network.neutron [req-a686afa4-f7ae-4244-951f-5daa97c95430 req-3adbef53-eaf5-420a-a7f7-2dca24b3c23a service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.020501] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.020898] env[63297]: DEBUG nova.compute.manager [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Instance network_info: |[{"id": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "address": "fa:16:3e:23:ee:4c", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c6ab49-f8", "ovs_interfaceid": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1415.021425] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:ee:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e55c248-c504-4c7a-bbe9-f42cf417aee7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8c6ab49-f882-4349-bddd-cfb1a972afc0', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1415.034559] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating folder: Project (6e3dcd98ebe94a75a94322b03feba3b4). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.035812] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a54670ff-d602-4037-b30e-6363c1622319 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.051425] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Created folder: Project (6e3dcd98ebe94a75a94322b03feba3b4) in parent group-v353718. [ 1415.051546] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating folder: Instances. Parent ref: group-v353896. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.051693] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2ec1093-d475-44e0-b0cd-d72ef9c468fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.063940] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Created folder: Instances in parent group-v353896. [ 1415.064230] env[63297]: DEBUG oslo.service.loopingcall [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1415.064511] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1415.064645] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9296c1d9-b0a5-478b-9a94-9d724318444c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.084590] env[63297]: DEBUG nova.network.neutron [-] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.093083] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1415.093083] env[63297]: value = "task-1697505" [ 1415.093083] env[63297]: _type = "Task" [ 1415.093083] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.104403] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697505, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.150559] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697502, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.187992] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "b95b7656-70ac-4eaf-9934-4b4c50e78035-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.188254] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "b95b7656-70ac-4eaf-9934-4b4c50e78035-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.188425] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "b95b7656-70ac-4eaf-9934-4b4c50e78035-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.225397] env[63297]: DEBUG nova.compute.manager [req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Received event network-changed-c8c6ab49-f882-4349-bddd-cfb1a972afc0 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1415.225602] env[63297]: DEBUG nova.compute.manager 
[req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Refreshing instance network info cache due to event network-changed-c8c6ab49-f882-4349-bddd-cfb1a972afc0. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1415.226121] env[63297]: DEBUG oslo_concurrency.lockutils [req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] Acquiring lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.226329] env[63297]: DEBUG oslo_concurrency.lockutils [req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] Acquired lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.226515] env[63297]: DEBUG nova.network.neutron [req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Refreshing network info cache for port c8c6ab49-f882-4349-bddd-cfb1a972afc0 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1415.241779] env[63297]: DEBUG oslo_concurrency.lockutils [None req-de646f80-c84d-4c4a-b075-16dace8fbc94 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "c4e96403-895c-479d-bfb2-274a87446bf9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.095s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.326456] env[63297]: DEBUG nova.network.neutron [-] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.353958] env[63297]: DEBUG nova.network.neutron [-] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.374228] env[63297]: DEBUG nova.network.neutron [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Successfully updated port: d6cfb890-e1a0-457e-801f-615234386b8f {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1415.377852] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca562dd3-c133-4f84-8935-353fd9571580 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.390453] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88d6a96-2c70-4253-8e34-6e05d40fcd1f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.432396] env[63297]: DEBUG nova.compute.manager [req-a686afa4-f7ae-4244-951f-5daa97c95430 req-3adbef53-eaf5-420a-a7f7-2dca24b3c23a service nova] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Detach interface failed, port_id=1354943e-9dda-4b43-9508-a9535e5627e5, reason: Instance 
a57c0638-e14b-4474-a6b4-7184d7e2a0fe could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1415.541461] env[63297]: DEBUG nova.network.neutron [-] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.586582] env[63297]: INFO nova.compute.manager [-] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Took 1.50 seconds to deallocate network for instance. [ 1415.607476] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697505, 'name': CreateVM_Task, 'duration_secs': 0.474345} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.607707] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1415.608395] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.608564] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.608881] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1415.609161] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ba9d8a5-33b2-4881-a247-5742878eb0f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.617346] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1415.617346] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52aaafec-3404-3bb9-5514-9f230918d88e" [ 1415.617346] env[63297]: _type = "Task" [ 1415.617346] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.629308] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52aaafec-3404-3bb9-5514-9f230918d88e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.654434] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697502, 'name': ReconfigVM_Task, 'duration_secs': 0.61001} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.654434] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Reconfigured VM instance instance-00000015 to attach disk [datastore1] b14e8466-68ab-4705-a439-6db961a149b0/b14e8466-68ab-4705-a439-6db961a149b0.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1415.656985] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec2e1b0f-bf86-4358-b9c9-a14762e29197 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.665724] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1415.665724] env[63297]: value = "task-1697506" [ 1415.665724] env[63297]: _type = "Task" [ 1415.665724] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.680255] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697506, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.826902] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311c6565-0ad5-4898-88b8-101d39328052 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.830410] env[63297]: INFO nova.compute.manager [-] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Took 1.61 seconds to deallocate network for instance. [ 1415.838214] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb89d7a0-e154-46c0-8713-b79f7af152be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.877285] env[63297]: INFO nova.compute.manager [-] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Took 1.05 seconds to deallocate network for instance. 
[ 1415.880281] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquiring lock "refresh_cache-d2436717-7230-448f-b310-d062b1f11c52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.880423] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquired lock "refresh_cache-d2436717-7230-448f-b310-d062b1f11c52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.880564] env[63297]: DEBUG nova.network.neutron [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1415.883693] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02abe699-5452-4160-af49-30d07614118d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.895203] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263c72b8-f731-41f2-9461-0713109ed00d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.911707] env[63297]: DEBUG nova.compute.provider_tree [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1416.044123] env[63297]: INFO nova.compute.manager [-] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Took 1.31 seconds to deallocate network for instance. [ 1416.093042] env[63297]: DEBUG oslo_concurrency.lockutils [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.130415] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52aaafec-3404-3bb9-5514-9f230918d88e, 'name': SearchDatastore_Task, 'duration_secs': 0.020671} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.130745] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.130957] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1416.131207] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.131352] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.131532] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1416.131789] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbfe1fda-13f5-41c1-aecb-03f636c54ad7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.146974] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1416.147198] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1416.148030] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e363154-53cb-43e1-a051-65835ae37f96 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.153900] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1416.153900] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524d24aa-e53f-5f18-22c5-74508e7201f5" [ 1416.153900] env[63297]: _type = "Task" [ 1416.153900] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.162483] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524d24aa-e53f-5f18-22c5-74508e7201f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.174219] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697506, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.253958] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.254181] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.254357] env[63297]: DEBUG nova.network.neutron [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1416.384945] env[63297]: INFO nova.compute.manager [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Took 0.55 seconds to detach 1 volumes for instance. 
[ 1416.387182] env[63297]: DEBUG nova.compute.manager [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Deleting volume: 950dc879-aa21-4a03-88c5-e4e67d4e27c0 {{(pid=63297) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1416.391920] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.414341] env[63297]: DEBUG nova.network.neutron [req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updated VIF entry in instance network info cache for port c8c6ab49-f882-4349-bddd-cfb1a972afc0. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1416.414784] env[63297]: DEBUG nova.network.neutron [req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance_info_cache with network_info: [{"id": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "address": "fa:16:3e:23:ee:4c", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c6ab49-f8", "ovs_interfaceid": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.416521] env[63297]: DEBUG nova.scheduler.client.report [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1416.420952] env[63297]: DEBUG nova.network.neutron [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 
tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1416.555815] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.618866] env[63297]: DEBUG nova.network.neutron [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Updating instance_info_cache with network_info: [{"id": "d6cfb890-e1a0-457e-801f-615234386b8f", "address": "fa:16:3e:7d:3b:5f", "network": {"id": "c0ff79df-83e5-4ec9-9ef7-0f0172b39d9a", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1638451245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2dc4f669c9b04dd5a958165c6149138d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6cfb890-e1", "ovs_interfaceid": "d6cfb890-e1a0-457e-801f-615234386b8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.666690] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.667371] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524d24aa-e53f-5f18-22c5-74508e7201f5, 'name': SearchDatastore_Task, 'duration_secs': 0.029557} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.667635] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.673702] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efa71b0b-9959-401f-8db9-f436735da94e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.680852] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1416.680852] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5210b50f-b2fe-a87c-6474-63acd61163fa" [ 1416.680852] env[63297]: _type = "Task" [ 1416.680852] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.685878] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697506, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.697190] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5210b50f-b2fe-a87c-6474-63acd61163fa, 'name': SearchDatastore_Task, 'duration_secs': 0.011544} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.697544] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.698363] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b1ed5d76-d358-49d3-a854-8f968bc987ad/b1ed5d76-d358-49d3-a854-8f968bc987ad.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1416.698701] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7691b5e-4bef-44f3-b3cb-0989c1ce00cd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.706682] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1416.706682] env[63297]: value = "task-1697508" [ 1416.706682] env[63297]: _type = "Task" [ 1416.706682] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.717625] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697508, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.926638] env[63297]: DEBUG oslo_concurrency.lockutils [req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] Releasing lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.927062] env[63297]: DEBUG nova.compute.manager [req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Received event network-vif-deleted-d0d9c69a-a4dc-4597-aeef-2c866176b393 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1416.927324] env[63297]: DEBUG nova.compute.manager [req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Received event network-vif-deleted-4c61e911-9830-4ecf-b4e8-2b56f86048dc {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1416.927550] env[63297]: INFO nova.compute.manager [req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Neutron deleted interface 4c61e911-9830-4ecf-b4e8-2b56f86048dc; detaching it from the instance and deleting it from the info cache [ 1416.927781] env[63297]: DEBUG nova.network.neutron [req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.931052] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.233s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.936099] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.408s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.936099] env[63297]: DEBUG nova.objects.instance [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lazy-loading 'resources' on Instance uuid b5d34058-fa3e-4806-97e5-638bbbffaeb8 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1416.946316] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.950819] env[63297]: INFO nova.scheduler.client.report [None 
req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Deleted allocations for instance ef57101e-1d8a-4ad5-ad68-cad2dbea33d1 [ 1417.119210] env[63297]: DEBUG nova.compute.manager [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] [instance: d2436717-7230-448f-b310-d062b1f11c52] Received event network-vif-plugged-d6cfb890-e1a0-457e-801f-615234386b8f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1417.119210] env[63297]: DEBUG oslo_concurrency.lockutils [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] Acquiring lock "d2436717-7230-448f-b310-d062b1f11c52-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.119210] env[63297]: DEBUG oslo_concurrency.lockutils [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] Lock "d2436717-7230-448f-b310-d062b1f11c52-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.119210] env[63297]: DEBUG oslo_concurrency.lockutils [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] Lock "d2436717-7230-448f-b310-d062b1f11c52-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.119210] env[63297]: DEBUG nova.compute.manager [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] [instance: d2436717-7230-448f-b310-d062b1f11c52] No waiting events found dispatching network-vif-plugged-d6cfb890-e1a0-457e-801f-615234386b8f {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1417.120338] env[63297]: WARNING nova.compute.manager [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] [instance: d2436717-7230-448f-b310-d062b1f11c52] Received unexpected event network-vif-plugged-d6cfb890-e1a0-457e-801f-615234386b8f for instance with vm_state building and task_state spawning. [ 1417.121608] env[63297]: DEBUG nova.compute.manager [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] [instance: d2436717-7230-448f-b310-d062b1f11c52] Received event network-changed-d6cfb890-e1a0-457e-801f-615234386b8f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1417.122085] env[63297]: DEBUG nova.compute.manager [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] [instance: d2436717-7230-448f-b310-d062b1f11c52] Refreshing instance network info cache due to event network-changed-d6cfb890-e1a0-457e-801f-615234386b8f. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1417.122470] env[63297]: DEBUG oslo_concurrency.lockutils [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] Acquiring lock "refresh_cache-d2436717-7230-448f-b310-d062b1f11c52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.123635] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Releasing lock "refresh_cache-d2436717-7230-448f-b310-d062b1f11c52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.125018] env[63297]: DEBUG nova.compute.manager [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Instance network_info: |[{"id": "d6cfb890-e1a0-457e-801f-615234386b8f", "address": "fa:16:3e:7d:3b:5f", "network": {"id": "c0ff79df-83e5-4ec9-9ef7-0f0172b39d9a", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1638451245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2dc4f669c9b04dd5a958165c6149138d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6cfb890-e1", "ovs_interfaceid": "d6cfb890-e1a0-457e-801f-615234386b8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1417.125018] env[63297]: DEBUG oslo_concurrency.lockutils [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] Acquired lock "refresh_cache-d2436717-7230-448f-b310-d062b1f11c52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.125018] env[63297]: DEBUG nova.network.neutron [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] [instance: d2436717-7230-448f-b310-d062b1f11c52] Refreshing network info cache for port d6cfb890-e1a0-457e-801f-615234386b8f {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1417.126873] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:3b:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6eb7e3e9-5cc2-40f1-a6eb-f70f06531667', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'd6cfb890-e1a0-457e-801f-615234386b8f', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1417.136740] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Creating folder: Project (2dc4f669c9b04dd5a958165c6149138d). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1417.137935] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44479874-5004-4cd4-8f4e-fe08f8ea78f6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.154213] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Created folder: Project (2dc4f669c9b04dd5a958165c6149138d) in parent group-v353718. [ 1417.154213] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Creating folder: Instances. Parent ref: group-v353899. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1417.154213] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cbbf7ffb-944d-48c1-8e51-4ee9ce7ed029 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.164241] env[63297]: DEBUG nova.network.neutron [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance_info_cache with network_info: [{"id": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "address": "fa:16:3e:21:7c:1d", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9a7e1f4-1a", "ovs_interfaceid": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.170203] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Created folder: Instances in parent group-v353899. 
[ 1417.170203] env[63297]: DEBUG oslo.service.loopingcall [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1417.173290] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2436717-7230-448f-b310-d062b1f11c52] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1417.180692] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0894cefa-d138-4b81-b61e-5ff431903b8f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.200966] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1417.201656] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1417.201656] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1417.208721] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697506, 'name': Rename_Task, 'duration_secs': 1.261766} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.214820] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1417.218336] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1417.218336] env[63297]: value = "task-1697511" [ 1417.218336] env[63297]: _type = "Task" [ 1417.218336] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.218918] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb484bf5-5c33-4255-be71-dd458103cc2d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.220816] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "efaa465d-f6b2-4891-8e96-b4c3af052759" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.221114] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "efaa465d-f6b2-4891-8e96-b4c3af052759" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.230086] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697508, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.235979] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697511, 'name': CreateVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.237561] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1417.237561] env[63297]: value = "task-1697512" [ 1417.237561] env[63297]: _type = "Task" [ 1417.237561] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.435923] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b122c15a-1da0-444a-a4fd-b2d0255bc0c8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.449897] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205e38ec-e9e2-45f9-84d8-cecb430987ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.469221] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42952031-d985-447a-a884-72777bc9c123 tempest-ServersTestJSON-518956339 tempest-ServersTestJSON-518956339-project-member] Lock "ef57101e-1d8a-4ad5-ad68-cad2dbea33d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 38.696s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.496853] env[63297]: DEBUG nova.compute.manager [req-e2af70dd-a9c6-4a10-9f2f-42eed3c3b5e7 req-649a44b2-22ce-496e-a19d-9a3dd127c956 service nova] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Detach interface failed, port_id=4c61e911-9830-4ecf-b4e8-2b56f86048dc, reason: Instance c83c23d9-a8ec-4a87-8a8c-067e18d2615a could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1417.666349] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.715418] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Skipping network cache update for instance because it is Building. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1417.715418] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: d2436717-7230-448f-b310-d062b1f11c52] Skipping network cache update for instance because it is Building. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1417.737410] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697508, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.634007} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.737410] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697511, 'name': CreateVM_Task, 'duration_secs': 0.424564} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.739574] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b1ed5d76-d358-49d3-a854-8f968bc987ad/b1ed5d76-d358-49d3-a854-8f968bc987ad.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1417.739838] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1417.740079] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2436717-7230-448f-b310-d062b1f11c52] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1417.740994] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c56c86b-9f2b-478c-8402-bf668f85ed7d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.743631] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.743860] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.744427] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1417.758053] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.758223] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquired lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.758343] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Forcefully refreshing network info cache for instance {{(pid=63297) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1417.758497] env[63297]: DEBUG nova.objects.instance [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lazy-loading 'info_cache' on Instance uuid ef851d71-788d-42f8-a824-5d30a89e957b {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1417.761284] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df63c4eb-ce35-4093-87eb-acaeac7ce52e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.772026] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697512, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.772327] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for the task: (returnval){ [ 1417.772327] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f14a9b-fbdd-1409-0646-a44073159bbf" [ 1417.772327] env[63297]: _type = "Task" [ 1417.772327] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.773565] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1417.773565] env[63297]: value = "task-1697513" [ 1417.773565] env[63297]: _type = "Task" [ 1417.773565] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.796335] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f14a9b-fbdd-1409-0646-a44073159bbf, 'name': SearchDatastore_Task, 'duration_secs': 0.011588} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.796606] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697513, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.799203] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.799438] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1417.799666] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.799812] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.799988] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1417.800457] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c73c8c2e-2dc3-4e0e-a952-05c0a536905d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.809565] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1417.809794] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1417.810503] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb92e47b-9c67-4dd0-a8f1-9541ebac6b47 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.819931] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for the task: (returnval){ [ 1417.819931] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e2383d-60e2-d477-b4a8-df8bd0da1204" [ 1417.819931] env[63297]: _type = "Task" [ 1417.819931] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.829367] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e2383d-60e2-d477-b4a8-df8bd0da1204, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.080085] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28dfd167-b82f-4e93-baa2-6a27249fe186 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.088724] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c28258d-925c-4239-a6f8-898c2f9ee580 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.131122] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90026019-02f0-4ae6-8a4a-cc365ceb9994 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.139862] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac82a7b-4643-4481-a549-c12304dd698f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.155919] env[63297]: DEBUG nova.compute.provider_tree [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1418.157976] env[63297]: DEBUG nova.network.neutron [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] [instance: d2436717-7230-448f-b310-d062b1f11c52] Updated VIF entry in instance network info cache for port d6cfb890-e1a0-457e-801f-615234386b8f. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1418.158328] env[63297]: DEBUG nova.network.neutron [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] [instance: d2436717-7230-448f-b310-d062b1f11c52] Updating instance_info_cache with network_info: [{"id": "d6cfb890-e1a0-457e-801f-615234386b8f", "address": "fa:16:3e:7d:3b:5f", "network": {"id": "c0ff79df-83e5-4ec9-9ef7-0f0172b39d9a", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1638451245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2dc4f669c9b04dd5a958165c6149138d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6cfb890-e1", "ovs_interfaceid": "d6cfb890-e1a0-457e-801f-615234386b8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.198689] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0eb923-5b34-4a45-89c8-89149029b32e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.221857] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78706122-558d-44eb-83d1-16d3c9923a33 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.230334] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance 'b95b7656-70ac-4eaf-9934-4b4c50e78035' progress to 83 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1418.250842] env[63297]: DEBUG oslo_vmware.api [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697512, 'name': PowerOnVM_Task, 'duration_secs': 0.544113} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.254152] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1418.254152] env[63297]: DEBUG nova.compute.manager [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1418.254152] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c94f9b-e783-4c16-99dc-7b139d97e355 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.287513] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697513, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126097} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.287824] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1418.288676] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35076b3f-b9ee-497f-9c0b-4b52d8efdf61 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.312500] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] b1ed5d76-d358-49d3-a854-8f968bc987ad/b1ed5d76-d358-49d3-a854-8f968bc987ad.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1418.313943] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a5fa916-c1df-4b86-a7cf-85384774143f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.347756] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e2383d-60e2-d477-b4a8-df8bd0da1204, 'name': SearchDatastore_Task, 'duration_secs': 0.039408} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.349939] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1418.349939] env[63297]: value = "task-1697514" [ 1418.349939] env[63297]: _type = "Task" [ 1418.349939] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.350171] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1078406a-cd4e-4258-b12d-8553f50da8d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.363122] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for the task: (returnval){ [ 1418.363122] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52913d0f-c430-f86f-fdae-bbc79220b784" [ 1418.363122] env[63297]: _type = "Task" [ 1418.363122] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.366460] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697514, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.375534] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52913d0f-c430-f86f-fdae-bbc79220b784, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.663019] env[63297]: DEBUG nova.scheduler.client.report [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1418.670529] env[63297]: DEBUG oslo_concurrency.lockutils [req-93da240a-e570-40b5-83d3-394d190ec9cb req-2592633d-4cb6-4412-9b8b-12c6a98ac1cf service nova] Releasing lock "refresh_cache-d2436717-7230-448f-b310-d062b1f11c52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.739019] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1418.739019] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e30f4ed-b22d-4682-bf29-30b876c59af7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.747018] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1418.747018] env[63297]: value = "task-1697515" [ 1418.747018] env[63297]: _type = "Task" [ 1418.747018] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.757975] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697515, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.775735] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.793255] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1418.864097] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697514, 'name': ReconfigVM_Task, 'duration_secs': 0.295842} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.864202] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Reconfigured VM instance instance-0000003b to attach disk [datastore1] b1ed5d76-d358-49d3-a854-8f968bc987ad/b1ed5d76-d358-49d3-a854-8f968bc987ad.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1418.864906] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49f1bfc7-6381-4dce-8158-f607c4a11529 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.876180] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52913d0f-c430-f86f-fdae-bbc79220b784, 'name': SearchDatastore_Task, 'duration_secs': 0.026327} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.877527] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.877811] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d2436717-7230-448f-b310-d062b1f11c52/d2436717-7230-448f-b310-d062b1f11c52.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1418.878117] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1418.878117] env[63297]: value = "task-1697516" [ 1418.878117] env[63297]: _type = "Task" [ 1418.878117] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.878308] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cfdd7df-1a5b-4576-bbd1-168b6d9b59be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.888430] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697516, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.890310] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for the task: (returnval){ [ 1418.890310] env[63297]: value = "task-1697517" [ 1418.890310] env[63297]: _type = "Task" [ 1418.890310] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.899269] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697517, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.174169] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.239s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1419.176966] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.800s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.179243] env[63297]: INFO nova.compute.claims [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1419.207313] env[63297]: INFO nova.scheduler.client.report [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Deleted allocations for instance b5d34058-fa3e-4806-97e5-638bbbffaeb8 [ 1419.259935] env[63297]: DEBUG oslo_vmware.api [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697515, 'name': PowerOnVM_Task, 'duration_secs': 0.432578} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.260811] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1419.260811] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1cd8de-be8a-4872-8f01-c73d46892ff7 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance 'b95b7656-70ac-4eaf-9934-4b4c50e78035' progress to 100 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1419.404243] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697516, 'name': Rename_Task, 'duration_secs': 0.155517} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.409141] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1419.410081] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697517, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.410512] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c001153b-1e76-4a56-8828-7cd72651d261 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.423314] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1419.423314] env[63297]: value = "task-1697518" [ 1419.423314] env[63297]: _type = "Task" [ 1419.423314] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.439038] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697518, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.465731] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.720870] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06be51b7-b7f9-4eac-bd92-735276358efd tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "b5d34058-fa3e-4806-97e5-638bbbffaeb8" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 39.033s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1419.749871] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "63785911-ea55-4aeb-9ba2-6cea5ddd9cae" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.750566] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "63785911-ea55-4aeb-9ba2-6cea5ddd9cae" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.751022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "63785911-ea55-4aeb-9ba2-6cea5ddd9cae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.751409] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "63785911-ea55-4aeb-9ba2-6cea5ddd9cae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.751750] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "63785911-ea55-4aeb-9ba2-6cea5ddd9cae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1419.755189] env[63297]: INFO nova.compute.manager [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Terminating instance [ 1419.758052] env[63297]: DEBUG nova.compute.manager [None 
req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1419.758432] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1419.759489] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1222a6-2bca-46b0-9fca-e81411280d08 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.774826] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1419.775909] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e59bf968-1a64-4617-b26b-47f6af2306ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.788269] env[63297]: DEBUG oslo_vmware.api [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1419.788269] env[63297]: value = "task-1697519" [ 1419.788269] env[63297]: _type = "Task" [ 1419.788269] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.799822] env[63297]: DEBUG oslo_vmware.api [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697519, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.903928] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697517, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580495} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.904418] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d2436717-7230-448f-b310-d062b1f11c52/d2436717-7230-448f-b310-d062b1f11c52.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1419.904749] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1419.905154] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-954f554a-b3e2-42b8-9fcd-6144adb31053 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.914967] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for the task: (returnval){ [ 1419.914967] env[63297]: value = "task-1697520" [ 1419.914967] env[63297]: _type = "Task" [ 1419.914967] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.926531] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697520, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.937358] env[63297]: DEBUG oslo_vmware.api [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697518, 'name': PowerOnVM_Task, 'duration_secs': 0.493236} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.937723] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1419.937966] env[63297]: INFO nova.compute.manager [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Took 7.70 seconds to spawn the instance on the hypervisor. 
[ 1419.938216] env[63297]: DEBUG nova.compute.manager [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1419.939043] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47231e84-4a69-461b-b30b-eb0520cab967 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.967788] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Releasing lock "refresh_cache-ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.968073] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Updated the network info_cache for instance {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1419.971744] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.971965] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.972106] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.972263] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.972409] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.972555] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.972689] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1419.972830] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1420.300145] env[63297]: DEBUG oslo_vmware.api [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697519, 'name': PowerOffVM_Task, 'duration_secs': 0.399089} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.302995] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1420.303226] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1420.303724] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2c0d5b1-93a9-4994-8585-a7287a88bd2d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.399500] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1420.399744] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1420.399926] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleting the datastore file [datastore1] 63785911-ea55-4aeb-9ba2-6cea5ddd9cae {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1420.400222] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5cfed2e-f1bd-4565-9c3d-ca3b0d016f67 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.410111] env[63297]: DEBUG oslo_vmware.api [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1420.410111] env[63297]: value = "task-1697522" [ 1420.410111] env[63297]: _type = "Task" [ 1420.410111] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.431653] env[63297]: DEBUG oslo_vmware.api [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697522, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.433873] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697520, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.162095} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.435381] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1420.436502] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ac60a5-3c1e-4211-80e5-fdcd67fd18ef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.470067] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] d2436717-7230-448f-b310-d062b1f11c52/d2436717-7230-448f-b310-d062b1f11c52.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1420.475643] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6163465-e8ea-4efa-8562-924d7b17ceeb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.492609] env[63297]: INFO nova.compute.manager [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Took 51.49 seconds to build instance. [ 1420.495485] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.503052] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for the task: (returnval){ [ 1420.503052] env[63297]: value = "task-1697523" [ 1420.503052] env[63297]: _type = "Task" [ 1420.503052] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.514174] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697523, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.804939] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3f5ff3-fc9c-4774-a5fe-83505312b334 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.816025] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40eb82ce-15ca-4b33-9446-773f73ea7f42 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.849509] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2ac64a-a82e-4e2d-adb7-7fdbade5942b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.862031] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e006213c-c9bf-4141-8427-809ebaef4f1b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.874498] env[63297]: DEBUG nova.compute.provider_tree [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1420.927953] env[63297]: DEBUG oslo_vmware.api [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697522, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.247549} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.928408] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1420.928499] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1420.928701] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1420.929775] env[63297]: INFO nova.compute.manager [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1420.929775] env[63297]: DEBUG oslo.service.loopingcall [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1420.929775] env[63297]: DEBUG nova.compute.manager [-] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1420.929775] env[63297]: DEBUG nova.network.neutron [-] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1420.995472] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5922fa3f-8cea-4f45-864d-204e3db990eb tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.310s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.017229] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697523, 'name': ReconfigVM_Task, 'duration_secs': 0.470422} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.017551] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Reconfigured VM instance instance-0000003c to attach disk [datastore1] d2436717-7230-448f-b310-d062b1f11c52/d2436717-7230-448f-b310-d062b1f11c52.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1421.018194] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0229f6d8-3eb7-425a-b419-7489de4d5833 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.026222] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for the task: (returnval){ [ 1421.026222] env[63297]: value = "task-1697524" [ 1421.026222] env[63297]: _type = "Task" [ 1421.026222] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.040303] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697524, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.260409] env[63297]: DEBUG nova.compute.manager [req-7fa96c99-87fc-4de1-9eba-ba82f953ef0a req-0bd111ba-ed76-4097-9a0b-22f91f561cbb service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Received event network-changed-c8c6ab49-f882-4349-bddd-cfb1a972afc0 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1421.260634] env[63297]: DEBUG nova.compute.manager [req-7fa96c99-87fc-4de1-9eba-ba82f953ef0a req-0bd111ba-ed76-4097-9a0b-22f91f561cbb service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Refreshing instance network info cache due to event network-changed-c8c6ab49-f882-4349-bddd-cfb1a972afc0. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1421.262098] env[63297]: DEBUG oslo_concurrency.lockutils [req-7fa96c99-87fc-4de1-9eba-ba82f953ef0a req-0bd111ba-ed76-4097-9a0b-22f91f561cbb service nova] Acquiring lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.262428] env[63297]: DEBUG oslo_concurrency.lockutils [req-7fa96c99-87fc-4de1-9eba-ba82f953ef0a req-0bd111ba-ed76-4097-9a0b-22f91f561cbb service nova] Acquired lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.262729] env[63297]: DEBUG nova.network.neutron [req-7fa96c99-87fc-4de1-9eba-ba82f953ef0a req-0bd111ba-ed76-4097-9a0b-22f91f561cbb service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Refreshing network info cache for port c8c6ab49-f882-4349-bddd-cfb1a972afc0 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1421.377949] env[63297]: DEBUG nova.scheduler.client.report [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1421.498243] env[63297]: DEBUG nova.compute.manager [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1421.544081] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697524, 'name': Rename_Task, 'duration_secs': 0.299038} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.544081] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1421.544258] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9198171-aae8-4cc3-bcd1-7e706a1f904e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.553109] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for the task: (returnval){ [ 1421.553109] env[63297]: value = "task-1697525" [ 1421.553109] env[63297]: _type = "Task" [ 1421.553109] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.563658] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697525, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.885695] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.709s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.886222] env[63297]: DEBUG nova.compute.manager [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1421.894666] env[63297]: DEBUG nova.network.neutron [-] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.895682] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.377s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.897260] env[63297]: INFO nova.compute.claims [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1422.019624] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.065309] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697525, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.177929] env[63297]: DEBUG nova.network.neutron [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Port d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1422.178278] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.178499] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.178597] env[63297]: DEBUG nova.network.neutron [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1422.224467] env[63297]: DEBUG nova.network.neutron [req-7fa96c99-87fc-4de1-9eba-ba82f953ef0a req-0bd111ba-ed76-4097-9a0b-22f91f561cbb service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updated VIF entry in instance network info cache for port c8c6ab49-f882-4349-bddd-cfb1a972afc0. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1422.224862] env[63297]: DEBUG nova.network.neutron [req-7fa96c99-87fc-4de1-9eba-ba82f953ef0a req-0bd111ba-ed76-4097-9a0b-22f91f561cbb service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance_info_cache with network_info: [{"id": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "address": "fa:16:3e:23:ee:4c", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c6ab49-f8", "ovs_interfaceid": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.396914] env[63297]: DEBUG nova.compute.utils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1422.398047] env[63297]: DEBUG nova.compute.manager [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1422.398216] env[63297]: DEBUG nova.network.neutron [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1422.406134] env[63297]: INFO nova.compute.manager [-] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Took 1.47 seconds to deallocate network for instance. 
[ 1422.484756] env[63297]: DEBUG nova.policy [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30253d72cb1f4a9faa9b616ad418d9e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1348601359224186bf59b12bfa5f1ef0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1422.566409] env[63297]: DEBUG oslo_vmware.api [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697525, 'name': PowerOnVM_Task, 'duration_secs': 0.919526} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.566758] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1422.566978] env[63297]: INFO nova.compute.manager [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Took 8.16 seconds to spawn the instance on the hypervisor. [ 1422.567202] env[63297]: DEBUG nova.compute.manager [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1422.568231] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c53cfe9-ab33-4827-b8c2-6a2b70a3a117 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.729408] env[63297]: DEBUG oslo_concurrency.lockutils [req-7fa96c99-87fc-4de1-9eba-ba82f953ef0a req-0bd111ba-ed76-4097-9a0b-22f91f561cbb service nova] Releasing lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1422.901709] env[63297]: DEBUG nova.compute.manager [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1422.915285] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.989025] env[63297]: DEBUG nova.network.neutron [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance_info_cache with network_info: [{"id": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "address": "fa:16:3e:21:7c:1d", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9a7e1f4-1a", "ovs_interfaceid": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.042949] env[63297]: DEBUG nova.network.neutron [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Successfully created port: 902a1c73-2ddc-4fab-8f16-538934cbd123 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1423.089509] env[63297]: INFO nova.compute.manager [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Took 43.12 seconds to build instance. 
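Throughout this section the lockutils entries record, for each named lock, how long the caller waited to acquire it and how long it was held (for example, the build lock on d2436717 was released after being held 48.190s, and the reboot path that follows immediately acquires a lock on the same instance UUID). The sketch below reproduces that named-lock bookkeeping with plain threading primitives; it is an illustrative stand-in, not oslo.concurrency.lockutils, and the named_lock helper is invented for the example.

# Illustrative named-lock helper that logs waited/held durations in the
# same style as the 'Lock "..." acquired ... waited Ns / held Ns' entries.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one lock per name, created on demand

@contextmanager
def named_lock(name, owner):
    lock = _locks[name]
    wait_start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - wait_start
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        print(f'Lock "{name}" released by "{owner}" :: held {held:.3f}s')

# Usage sketch: serialize a reboot against other operations on one instance.
with named_lock("d2436717-7230-448f-b310-d062b1f11c52", "do_reboot_instance"):
    pass  # the reboot work would go here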
[ 1423.405038] env[63297]: DEBUG nova.compute.manager [req-c97666c7-3168-4d2f-ac15-d67cb69b0e4e req-6e74a96b-0d3e-420e-b3a4-cd403de864a2 service nova] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Received event network-vif-deleted-2b5c1a4a-c8b7-459a-b120-b193ed9337d5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1423.436940] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f04a69-7708-4560-a9df-4aa533c1d32c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.445888] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85137f84-ac20-4a21-adf1-1f5feb5ef46b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.480786] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e405de-8470-4ee7-ba1e-857036e962d8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.489406] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a186ca-11fa-42ed-a199-6d88fbb29fac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.494405] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.508671] env[63297]: DEBUG nova.compute.provider_tree [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1423.591830] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f81618b-8ec0-47f1-bb74-820c0e2e7fc0 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lock "d2436717-7230-448f-b310-d062b1f11c52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.190s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.729041] env[63297]: DEBUG oslo_concurrency.lockutils [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquiring lock "d2436717-7230-448f-b310-d062b1f11c52" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.729041] env[63297]: DEBUG oslo_concurrency.lockutils [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lock "d2436717-7230-448f-b310-d062b1f11c52" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.731987] env[63297]: INFO nova.compute.manager [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Rebooting instance [ 1423.914289] env[63297]: DEBUG nova.compute.manager [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1423.945722] env[63297]: DEBUG nova.virt.hardware [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1423.945964] env[63297]: DEBUG nova.virt.hardware [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1423.946139] env[63297]: DEBUG nova.virt.hardware [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1423.946318] env[63297]: DEBUG nova.virt.hardware [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1423.946465] env[63297]: DEBUG nova.virt.hardware [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1423.946611] env[63297]: DEBUG nova.virt.hardware [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1423.946815] env[63297]: DEBUG nova.virt.hardware [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1423.946970] env[63297]: DEBUG nova.virt.hardware [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1423.947153] env[63297]: DEBUG nova.virt.hardware [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1423.947313] env[63297]: DEBUG nova.virt.hardware [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1423.947484] env[63297]: DEBUG nova.virt.hardware [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1423.948378] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef51b08b-9671-4e52-8433-adb5b347af34 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.957869] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c6b0c4-4239-4203-ac00-9db759055dc5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.002222] env[63297]: DEBUG nova.compute.manager [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63297) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1424.002576] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.012142] env[63297]: DEBUG nova.scheduler.client.report [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 
based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1424.257336] env[63297]: DEBUG oslo_concurrency.lockutils [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquiring lock "refresh_cache-d2436717-7230-448f-b310-d062b1f11c52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.257541] env[63297]: DEBUG oslo_concurrency.lockutils [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquired lock "refresh_cache-d2436717-7230-448f-b310-d062b1f11c52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.257711] env[63297]: DEBUG nova.network.neutron [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1424.526054] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.630s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.528857] env[63297]: DEBUG nova.compute.manager [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1424.530026] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.029s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.531526] env[63297]: INFO nova.compute.claims [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1424.844016] env[63297]: DEBUG nova.network.neutron [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Successfully updated port: 902a1c73-2ddc-4fab-8f16-538934cbd123 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1425.036935] env[63297]: DEBUG nova.compute.utils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1425.038512] env[63297]: DEBUG nova.compute.manager [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1425.042326] env[63297]: DEBUG nova.network.neutron [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1425.102222] env[63297]: DEBUG nova.policy [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83ddfb07a6d648be89aff489836cff7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '734a95312d7d4da38201457d4f542a9e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1425.218195] env[63297]: DEBUG nova.network.neutron [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Updating instance_info_cache with network_info: [{"id": "d6cfb890-e1a0-457e-801f-615234386b8f", "address": "fa:16:3e:7d:3b:5f", "network": {"id": "c0ff79df-83e5-4ec9-9ef7-0f0172b39d9a", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1638451245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2dc4f669c9b04dd5a958165c6149138d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6cfb890-e1", "ovs_interfaceid": "d6cfb890-e1a0-457e-801f-615234386b8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.347475] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "refresh_cache-e7fae121-174f-4955-a185-b3f92c6ab110" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1425.347852] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquired lock "refresh_cache-e7fae121-174f-4955-a185-b3f92c6ab110" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.347852] env[63297]: DEBUG nova.network.neutron [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1425.431357] env[63297]: DEBUG nova.compute.manager [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Received event network-vif-plugged-902a1c73-2ddc-4fab-8f16-538934cbd123 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1425.431596] env[63297]: DEBUG oslo_concurrency.lockutils [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] Acquiring lock "e7fae121-174f-4955-a185-b3f92c6ab110-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.431804] env[63297]: DEBUG oslo_concurrency.lockutils [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] Lock "e7fae121-174f-4955-a185-b3f92c6ab110-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.431982] env[63297]: DEBUG oslo_concurrency.lockutils [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] Lock "e7fae121-174f-4955-a185-b3f92c6ab110-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.432689] env[63297]: DEBUG nova.compute.manager [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] No waiting events found dispatching network-vif-plugged-902a1c73-2ddc-4fab-8f16-538934cbd123 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1425.432918] env[63297]: WARNING nova.compute.manager [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Received unexpected event network-vif-plugged-902a1c73-2ddc-4fab-8f16-538934cbd123 for instance with vm_state building and task_state spawning. [ 1425.433120] env[63297]: DEBUG nova.compute.manager [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Received event network-changed-902a1c73-2ddc-4fab-8f16-538934cbd123 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1425.433286] env[63297]: DEBUG nova.compute.manager [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Refreshing instance network info cache due to event network-changed-902a1c73-2ddc-4fab-8f16-538934cbd123. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1425.433457] env[63297]: DEBUG oslo_concurrency.lockutils [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] Acquiring lock "refresh_cache-e7fae121-174f-4955-a185-b3f92c6ab110" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1425.452492] env[63297]: DEBUG nova.network.neutron [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Successfully created port: bc6f8a91-4b3e-4532-be2b-27a1ebd119aa {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1425.546098] env[63297]: DEBUG nova.compute.utils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1425.721241] env[63297]: DEBUG oslo_concurrency.lockutils [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Releasing lock "refresh_cache-d2436717-7230-448f-b310-d062b1f11c52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.723726] env[63297]: DEBUG nova.compute.manager [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1425.727184] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9205c1-e875-4454-9438-5ab2f504f900 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.912953] env[63297]: DEBUG nova.network.neutron [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1426.049406] env[63297]: DEBUG nova.compute.manager [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1426.155318] env[63297]: DEBUG nova.network.neutron [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Updating instance_info_cache with network_info: [{"id": "902a1c73-2ddc-4fab-8f16-538934cbd123", "address": "fa:16:3e:da:0f:8f", "network": {"id": "37468c24-d4a3-498d-9a46-9eb69e62b4b7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1496183778-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1348601359224186bf59b12bfa5f1ef0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap902a1c73-2d", "ovs_interfaceid": "902a1c73-2ddc-4fab-8f16-538934cbd123", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.169719] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab6e28d-66a2-4bf5-9ea0-8b6e0dbcb99a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.183050] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21efc6c-889f-44ef-b42d-e5a3aaa044c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.226468] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55c597a-1971-4b0f-b712-bc513215b44a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.235029] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c94956-40d5-4af8-acf3-2201beb1174a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.255980] env[63297]: DEBUG nova.compute.provider_tree [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1426.548027] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Acquiring lock "ac112251-8cc3-4f57-8983-8a07e2a068f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.548027] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Lock "ac112251-8cc3-4f57-8983-8a07e2a068f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.657217] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Releasing lock "refresh_cache-e7fae121-174f-4955-a185-b3f92c6ab110" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.657829] env[63297]: DEBUG nova.compute.manager [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Instance network_info: |[{"id": "902a1c73-2ddc-4fab-8f16-538934cbd123", "address": "fa:16:3e:da:0f:8f", "network": {"id": "37468c24-d4a3-498d-9a46-9eb69e62b4b7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1496183778-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1348601359224186bf59b12bfa5f1ef0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap902a1c73-2d", "ovs_interfaceid": "902a1c73-2ddc-4fab-8f16-538934cbd123", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1426.658637] env[63297]: DEBUG oslo_concurrency.lockutils [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] Acquired lock "refresh_cache-e7fae121-174f-4955-a185-b3f92c6ab110" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.658871] env[63297]: DEBUG nova.network.neutron [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Refreshing network info cache for port 902a1c73-2ddc-4fab-8f16-538934cbd123 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1426.660618] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:da:0f:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52358fcc-0d9f-45dd-8c75-db533fd992c3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '902a1c73-2ddc-4fab-8f16-538934cbd123', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1426.668673] env[63297]: DEBUG oslo.service.loopingcall [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1426.669795] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1426.670046] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8e84529-591e-4abc-9022-bb6db00af175 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.694193] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1426.694193] env[63297]: value = "task-1697526" [ 1426.694193] env[63297]: _type = "Task" [ 1426.694193] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.702701] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697526, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.747061] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff016cfe-7224-40ad-9da9-2e9c2279f0ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.754414] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Doing hard reboot of VM {{(pid=63297) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1426.754669] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-f0424602-d423-4318-b55b-8906fe077471 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.758182] env[63297]: DEBUG nova.scheduler.client.report [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1426.762667] env[63297]: DEBUG oslo_vmware.api [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 
tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for the task: (returnval){ [ 1426.762667] env[63297]: value = "task-1697527" [ 1426.762667] env[63297]: _type = "Task" [ 1426.762667] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.772332] env[63297]: DEBUG oslo_vmware.api [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697527, 'name': ResetVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.049869] env[63297]: DEBUG nova.compute.manager [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1427.066973] env[63297]: DEBUG nova.compute.manager [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1427.097031] env[63297]: DEBUG nova.virt.hardware [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:22:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='812128200',id=18,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1361032745',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1427.097031] env[63297]: DEBUG nova.virt.hardware [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1427.097031] env[63297]: DEBUG nova.virt.hardware [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1427.097256] env[63297]: DEBUG nova.virt.hardware [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1427.097339] env[63297]: DEBUG nova.virt.hardware [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1427.097493] env[63297]: DEBUG nova.virt.hardware [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1427.097710] env[63297]: DEBUG nova.virt.hardware [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1427.097869] env[63297]: DEBUG nova.virt.hardware [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1427.098047] env[63297]: DEBUG nova.virt.hardware [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1427.098214] env[63297]: DEBUG nova.virt.hardware [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1427.098390] env[63297]: DEBUG nova.virt.hardware [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1427.099540] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e950589b-5825-46d8-96b2-e7c41ef4eda9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.107872] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae2a646-761f-4d2f-af60-07b2fdf57a65 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.205216] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697526, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.268379] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.738s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.269129] env[63297]: DEBUG nova.compute.manager [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1427.274897] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.209s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1427.274897] env[63297]: DEBUG nova.objects.instance [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lazy-loading 'resources' on Instance uuid 192c3a5d-3a23-4f78-8dc7-a256b6d9381d {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1427.290089] env[63297]: DEBUG oslo_vmware.api [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697527, 'name': ResetVM_Task, 'duration_secs': 0.095847} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.291144] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Did hard reboot of VM {{(pid=63297) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1427.291340] env[63297]: DEBUG nova.compute.manager [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1427.292321] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0669ceca-a042-4a03-b28d-75c8e1ee1572 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.430178] env[63297]: DEBUG nova.network.neutron [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Successfully updated port: bc6f8a91-4b3e-4532-be2b-27a1ebd119aa {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1427.467841] env[63297]: DEBUG nova.network.neutron [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Updated VIF entry in instance network info cache for port 902a1c73-2ddc-4fab-8f16-538934cbd123. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1427.468224] env[63297]: DEBUG nova.network.neutron [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Updating instance_info_cache with network_info: [{"id": "902a1c73-2ddc-4fab-8f16-538934cbd123", "address": "fa:16:3e:da:0f:8f", "network": {"id": "37468c24-d4a3-498d-9a46-9eb69e62b4b7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1496183778-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1348601359224186bf59b12bfa5f1ef0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap902a1c73-2d", "ovs_interfaceid": "902a1c73-2ddc-4fab-8f16-538934cbd123", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.485599] env[63297]: DEBUG nova.compute.manager [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] [instance: 
b261c90f-642d-42b7-8b79-d87eeaf0537a] Received event network-vif-plugged-bc6f8a91-4b3e-4532-be2b-27a1ebd119aa {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1427.485929] env[63297]: DEBUG oslo_concurrency.lockutils [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] Acquiring lock "b261c90f-642d-42b7-8b79-d87eeaf0537a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.486498] env[63297]: DEBUG oslo_concurrency.lockutils [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] Lock "b261c90f-642d-42b7-8b79-d87eeaf0537a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1427.486719] env[63297]: DEBUG oslo_concurrency.lockutils [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] Lock "b261c90f-642d-42b7-8b79-d87eeaf0537a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.486928] env[63297]: DEBUG nova.compute.manager [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] No waiting events found dispatching network-vif-plugged-bc6f8a91-4b3e-4532-be2b-27a1ebd119aa {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1427.487140] env[63297]: WARNING nova.compute.manager [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Received unexpected event network-vif-plugged-bc6f8a91-4b3e-4532-be2b-27a1ebd119aa for instance with vm_state building and task_state spawning. [ 1427.487513] env[63297]: DEBUG nova.compute.manager [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Received event network-changed-bc6f8a91-4b3e-4532-be2b-27a1ebd119aa {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1427.487717] env[63297]: DEBUG nova.compute.manager [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Refreshing instance network info cache due to event network-changed-bc6f8a91-4b3e-4532-be2b-27a1ebd119aa. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1427.487946] env[63297]: DEBUG oslo_concurrency.lockutils [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] Acquiring lock "refresh_cache-b261c90f-642d-42b7-8b79-d87eeaf0537a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1427.488148] env[63297]: DEBUG oslo_concurrency.lockutils [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] Acquired lock "refresh_cache-b261c90f-642d-42b7-8b79-d87eeaf0537a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.488348] env[63297]: DEBUG nova.network.neutron [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Refreshing network info cache for port bc6f8a91-4b3e-4532-be2b-27a1ebd119aa {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1427.573605] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.705423] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697526, 'name': CreateVM_Task, 'duration_secs': 0.691263} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.705585] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1427.706240] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1427.706404] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.706784] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1427.707054] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7331b727-3a7e-47b5-8a17-420ce0464057 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1427.712795] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1427.712795] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52915802-fdac-bb6e-99b5-bffb87f76fab" [ 1427.712795] env[63297]: _type = "Task" [ 1427.712795] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.722091] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52915802-fdac-bb6e-99b5-bffb87f76fab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.776080] env[63297]: DEBUG nova.compute.utils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1427.777320] env[63297]: DEBUG nova.compute.manager [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1427.777497] env[63297]: DEBUG nova.network.neutron [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1427.817268] env[63297]: DEBUG oslo_concurrency.lockutils [None req-82b80a2d-e16d-4edb-a7f1-e5203213dfc6 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lock "d2436717-7230-448f-b310-d062b1f11c52" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.088s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.824709] env[63297]: DEBUG nova.policy [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2d8413d4aad4ed8a1fa9e436de117ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc81b0f87c64b2283eb0ece21fb31a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1427.937167] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock 
"refresh_cache-b261c90f-642d-42b7-8b79-d87eeaf0537a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1427.971189] env[63297]: DEBUG oslo_concurrency.lockutils [req-8ac51380-64e3-4eed-a464-504b3df457d6 req-da29d82e-7dcd-45c6-bc5d-4e47da183328 service nova] Releasing lock "refresh_cache-e7fae121-174f-4955-a185-b3f92c6ab110" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.047685] env[63297]: DEBUG nova.network.neutron [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1428.146514] env[63297]: DEBUG nova.network.neutron [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.181717] env[63297]: DEBUG nova.network.neutron [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Successfully created port: 1952432a-7339-4c5d-80fc-5dac66b659e2 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1428.225188] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52915802-fdac-bb6e-99b5-bffb87f76fab, 'name': SearchDatastore_Task, 'duration_secs': 0.017413} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.227657] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.227885] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1428.228146] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.228296] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.228475] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1428.229221] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf5e0b58-a52d-4adc-ab8e-0217900af0d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.240960] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1428.242247] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1428.244381] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8d511bb-43f6-420c-bd1f-92d3eed14ed1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.255141] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1428.255141] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52476a33-b3eb-03e1-f02c-b5e29f552443" [ 1428.255141] env[63297]: _type = "Task" [ 1428.255141] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.266626] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52476a33-b3eb-03e1-f02c-b5e29f552443, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.280706] env[63297]: DEBUG nova.compute.manager [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1428.324470] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef099c48-9a1b-45e7-b4ed-88a13317e118 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.333282] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c2a7f6-3889-481e-a5f1-342968068d7a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.363917] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1216fc1-f84d-4b1c-bcca-da2385db7f49 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.371812] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fbae02-83d9-4438-9233-23cf3288097c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.388737] env[63297]: DEBUG nova.compute.provider_tree [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1428.654583] env[63297]: DEBUG oslo_concurrency.lockutils [req-1c0a8e92-1897-4365-aabc-0569c5960994 req-28123b35-ab24-44e7-abf3-73b5bd2e4585 service nova] Releasing lock "refresh_cache-b261c90f-642d-42b7-8b79-d87eeaf0537a" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.655583] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquired lock "refresh_cache-b261c90f-642d-42b7-8b79-d87eeaf0537a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.656156] env[63297]: DEBUG nova.network.neutron [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1428.752714] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquiring lock "d2436717-7230-448f-b310-d062b1f11c52" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.752995] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lock "d2436717-7230-448f-b310-d062b1f11c52" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.753225] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquiring lock "d2436717-7230-448f-b310-d062b1f11c52-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.753408] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lock "d2436717-7230-448f-b310-d062b1f11c52-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.753586] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lock "d2436717-7230-448f-b310-d062b1f11c52-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.755878] env[63297]: INFO nova.compute.manager [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Terminating instance [ 1428.761328] env[63297]: DEBUG nova.compute.manager [None 
req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1428.761526] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1428.762358] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950131a1-fa4d-42eb-9b21-d583da402659 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.773806] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52476a33-b3eb-03e1-f02c-b5e29f552443, 'name': SearchDatastore_Task, 'duration_secs': 0.022868} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.774488] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1428.775603] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffedbcc6-4d1c-4c1d-a374-3ef5b03c1d2e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.777843] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baab359f-5719-4e8b-b348-0156a202717f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.784666] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1428.784666] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529ae12f-1773-5083-6c4a-69023e8eab79" [ 1428.784666] env[63297]: _type = "Task" [ 1428.784666] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.794423] env[63297]: DEBUG oslo_vmware.api [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for the task: (returnval){ [ 1428.794423] env[63297]: value = "task-1697528" [ 1428.794423] env[63297]: _type = "Task" [ 1428.794423] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.802266] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529ae12f-1773-5083-6c4a-69023e8eab79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.809679] env[63297]: DEBUG oslo_vmware.api [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697528, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.890043] env[63297]: DEBUG nova.scheduler.client.report [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1429.218052] env[63297]: DEBUG nova.network.neutron [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1429.297221] env[63297]: DEBUG nova.compute.manager [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1429.299825] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529ae12f-1773-5083-6c4a-69023e8eab79, 'name': SearchDatastore_Task, 'duration_secs': 0.022995} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.303812] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.303812] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] e7fae121-174f-4955-a185-b3f92c6ab110/e7fae121-174f-4955-a185-b3f92c6ab110.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1429.307012] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01a7b800-74a4-4540-88b4-25973074c5ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.316346] env[63297]: DEBUG oslo_vmware.api [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697528, 'name': PowerOffVM_Task, 'duration_secs': 0.493622} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.318304] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1429.318561] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1429.318940] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1429.318940] env[63297]: value = "task-1697529" [ 1429.318940] env[63297]: _type = "Task" [ 1429.318940] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.319341] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4fdf733c-0f6d-4617-801b-bb163acde0f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.331636] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697529, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.334017] env[63297]: DEBUG nova.virt.hardware [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1429.334270] env[63297]: DEBUG nova.virt.hardware [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1429.334440] env[63297]: DEBUG nova.virt.hardware [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1429.334627] env[63297]: DEBUG nova.virt.hardware [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1429.334773] env[63297]: DEBUG nova.virt.hardware [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1429.334971] env[63297]: DEBUG nova.virt.hardware [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1429.335221] env[63297]: DEBUG nova.virt.hardware [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1429.335384] env[63297]: DEBUG nova.virt.hardware [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1429.335550] 
env[63297]: DEBUG nova.virt.hardware [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1429.335710] env[63297]: DEBUG nova.virt.hardware [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1429.335882] env[63297]: DEBUG nova.virt.hardware [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1429.336702] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4c0262-5961-4b0b-8d43-3b23531cb0ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.344911] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a51093-e553-48ee-b647-e9dedddb4d03 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.396629] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.122s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.399533] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1429.399738] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1429.399916] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Deleting the datastore file [datastore1] d2436717-7230-448f-b310-d062b1f11c52 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1429.400451] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.049s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.400676] 
env[63297]: DEBUG nova.objects.instance [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'resources' on Instance uuid ef851d71-788d-42f8-a824-5d30a89e957b {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1429.402054] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9cbf9df-de07-4691-8de9-4ffcbda91dee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.409921] env[63297]: DEBUG oslo_vmware.api [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for the task: (returnval){ [ 1429.409921] env[63297]: value = "task-1697531" [ 1429.409921] env[63297]: _type = "Task" [ 1429.409921] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.410874] env[63297]: DEBUG nova.network.neutron [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Updating instance_info_cache with network_info: [{"id": "bc6f8a91-4b3e-4532-be2b-27a1ebd119aa", "address": "fa:16:3e:1c:79:55", "network": {"id": "bfd15893-9bb1-46a3-bf31-db474ed0269a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1591634149-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734a95312d7d4da38201457d4f542a9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc6f8a91-4b", "ovs_interfaceid": "bc6f8a91-4b3e-4532-be2b-27a1ebd119aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.422102] env[63297]: DEBUG oslo_vmware.api [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697531, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.423073] env[63297]: INFO nova.scheduler.client.report [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Deleted allocations for instance 192c3a5d-3a23-4f78-8dc7-a256b6d9381d [ 1429.700182] env[63297]: DEBUG nova.compute.manager [req-7c69a686-52b3-4de0-946f-e3d801ef4f71 req-b60b9bf3-e04f-4f68-baa9-352cf6ecf9c3 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Received event network-vif-plugged-1952432a-7339-4c5d-80fc-5dac66b659e2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1429.700436] env[63297]: DEBUG oslo_concurrency.lockutils [req-7c69a686-52b3-4de0-946f-e3d801ef4f71 req-b60b9bf3-e04f-4f68-baa9-352cf6ecf9c3 service nova] Acquiring lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.700679] env[63297]: DEBUG oslo_concurrency.lockutils [req-7c69a686-52b3-4de0-946f-e3d801ef4f71 req-b60b9bf3-e04f-4f68-baa9-352cf6ecf9c3 service nova] Lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.700854] env[63297]: DEBUG oslo_concurrency.lockutils [req-7c69a686-52b3-4de0-946f-e3d801ef4f71 req-b60b9bf3-e04f-4f68-baa9-352cf6ecf9c3 service nova] Lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.701290] env[63297]: DEBUG nova.compute.manager [req-7c69a686-52b3-4de0-946f-e3d801ef4f71 req-b60b9bf3-e04f-4f68-baa9-352cf6ecf9c3 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] No waiting events found dispatching network-vif-plugged-1952432a-7339-4c5d-80fc-5dac66b659e2 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1429.701437] env[63297]: WARNING nova.compute.manager [req-7c69a686-52b3-4de0-946f-e3d801ef4f71 req-b60b9bf3-e04f-4f68-baa9-352cf6ecf9c3 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Received unexpected event network-vif-plugged-1952432a-7339-4c5d-80fc-5dac66b659e2 for instance with vm_state building and task_state spawning. [ 1429.836346] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697529, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.876248] env[63297]: DEBUG nova.network.neutron [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Successfully updated port: 1952432a-7339-4c5d-80fc-5dac66b659e2 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1429.913482] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Releasing lock "refresh_cache-b261c90f-642d-42b7-8b79-d87eeaf0537a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.913830] env[63297]: DEBUG nova.compute.manager [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Instance network_info: |[{"id": "bc6f8a91-4b3e-4532-be2b-27a1ebd119aa", "address": "fa:16:3e:1c:79:55", "network": {"id": "bfd15893-9bb1-46a3-bf31-db474ed0269a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1591634149-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734a95312d7d4da38201457d4f542a9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc6f8a91-4b", "ovs_interfaceid": "bc6f8a91-4b3e-4532-be2b-27a1ebd119aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1429.920624] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:79:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc6f8a91-4b3e-4532-be2b-27a1ebd119aa', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1429.928769] env[63297]: DEBUG oslo.service.loopingcall [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1429.930848] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1429.931314] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72547491-8629-45ff-b9a6-b6c369759187 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "192c3a5d-3a23-4f78-8dc7-a256b6d9381d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.996s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.936044] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a819a62-d444-4125-918c-a1efca3b550c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.960515] env[63297]: DEBUG oslo_vmware.api [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Task: {'id': task-1697531, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.352501} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.961784] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1429.961784] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1429.961985] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1429.962026] env[63297]: INFO nova.compute.manager [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] [instance: d2436717-7230-448f-b310-d062b1f11c52] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1429.962306] env[63297]: DEBUG oslo.service.loopingcall [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1429.964810] env[63297]: DEBUG nova.compute.manager [-] [instance: d2436717-7230-448f-b310-d062b1f11c52] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1429.964810] env[63297]: DEBUG nova.network.neutron [-] [instance: d2436717-7230-448f-b310-d062b1f11c52] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1429.967827] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1429.967827] env[63297]: value = "task-1697532" [ 1429.967827] env[63297]: _type = "Task" [ 1429.967827] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.978807] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697532, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.335739] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697529, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577603} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.336139] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] e7fae121-174f-4955-a185-b3f92c6ab110/e7fae121-174f-4955-a185-b3f92c6ab110.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1430.336386] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1430.336690] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e21537a-fd91-40e2-ad45-c2231b07b047 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.343865] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1430.343865] env[63297]: value = "task-1697533" [ 1430.343865] env[63297]: _type = "Task" [ 1430.343865] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.355983] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697533, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.380536] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.380707] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.381719] env[63297]: DEBUG nova.network.neutron [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1430.413908] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292e7c1e-c794-4974-8d11-8a8d52b3462a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.422029] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09dd65b1-06c6-4e54-afb0-1762d6c323a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.451819] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc7e18c-cf58-49bf-9538-3efa70e3895f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.461011] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16cad662-43ec-4b77-883e-1214d8a0a754 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.479622] env[63297]: DEBUG nova.compute.provider_tree [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1430.486475] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697532, 'name': CreateVM_Task, 'duration_secs': 0.435912} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.486668] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1430.487901] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.488083] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.488552] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1430.489158] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6540295d-037b-4df0-89e0-cc8bdea70c8e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.494268] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1430.494268] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52de558d-e9a1-3ba3-0d1e-490a16ca7891" [ 1430.494268] env[63297]: _type = "Task" [ 1430.494268] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.502852] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52de558d-e9a1-3ba3-0d1e-490a16ca7891, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.703494] env[63297]: DEBUG nova.network.neutron [-] [instance: d2436717-7230-448f-b310-d062b1f11c52] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.854608] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697533, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079601} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.854885] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1430.855728] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c14ce44-a2ec-4622-92ec-3f69cc9e9e9d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.878501] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] e7fae121-174f-4955-a185-b3f92c6ab110/e7fae121-174f-4955-a185-b3f92c6ab110.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1430.878810] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89d93959-3c76-4ef8-b28c-2993522385f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.910266] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1430.910266] env[63297]: value = "task-1697534" [ 1430.910266] env[63297]: _type = "Task" [ 1430.910266] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.919355] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697534, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.932776] env[63297]: DEBUG nova.network.neutron [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1430.984096] env[63297]: DEBUG nova.scheduler.client.report [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1431.006205] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52de558d-e9a1-3ba3-0d1e-490a16ca7891, 'name': SearchDatastore_Task, 'duration_secs': 0.009648} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.006504] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.006736] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1431.006967] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.007130] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.007310] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1431.007574] env[63297]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fe0d5a9-4631-4bb8-9b66-636758355ead {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.016719] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1431.016892] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1431.017630] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc47c8e9-0b8b-4211-a37a-f04145afee64 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.023477] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1431.023477] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5208aaa1-594a-63fb-bc85-41ee72ab1363" [ 1431.023477] env[63297]: _type = "Task" [ 1431.023477] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.033420] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5208aaa1-594a-63fb-bc85-41ee72ab1363, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.114413] env[63297]: DEBUG nova.network.neutron [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Updating instance_info_cache with network_info: [{"id": "1952432a-7339-4c5d-80fc-5dac66b659e2", "address": "fa:16:3e:80:2c:fe", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1952432a-73", "ovs_interfaceid": "1952432a-7339-4c5d-80fc-5dac66b659e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.206494] env[63297]: INFO nova.compute.manager [-] [instance: d2436717-7230-448f-b310-d062b1f11c52] Took 1.24 seconds to deallocate network for instance. [ 1431.421807] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697534, 'name': ReconfigVM_Task, 'duration_secs': 0.277623} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.422164] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Reconfigured VM instance instance-0000003d to attach disk [datastore1] e7fae121-174f-4955-a185-b3f92c6ab110/e7fae121-174f-4955-a185-b3f92c6ab110.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1431.422798] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43041b3f-0ab1-46f2-8528-937c2c6673fd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.430181] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1431.430181] env[63297]: value = "task-1697535" [ 1431.430181] env[63297]: _type = "Task" [ 1431.430181] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.440045] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697535, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.492515] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.092s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.495351] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.785s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.495516] env[63297]: DEBUG nova.objects.instance [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Lazy-loading 'resources' on Instance uuid 4e6b1296-9e19-4047-9c38-dc94c686d0cb {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1431.519336] env[63297]: INFO nova.scheduler.client.report [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Deleted allocations for instance ef851d71-788d-42f8-a824-5d30a89e957b [ 1431.535684] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5208aaa1-594a-63fb-bc85-41ee72ab1363, 'name': SearchDatastore_Task, 'duration_secs': 0.009461} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.536364] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62cc621c-0fc5-4360-a3a9-f43adfbfa06b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.542963] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1431.542963] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ea1a6b-5d83-9388-4643-9313f2a550be" [ 1431.542963] env[63297]: _type = "Task" [ 1431.542963] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.551850] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ea1a6b-5d83-9388-4643-9313f2a550be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.617674] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.618064] env[63297]: DEBUG nova.compute.manager [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Instance network_info: |[{"id": "1952432a-7339-4c5d-80fc-5dac66b659e2", "address": "fa:16:3e:80:2c:fe", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1952432a-73", "ovs_interfaceid": "1952432a-7339-4c5d-80fc-5dac66b659e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1431.618448] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:2c:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fc48e29b-113c-4849-850c-35435eab4052', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1952432a-7339-4c5d-80fc-5dac66b659e2', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1431.626086] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Creating folder: Project (9cc81b0f87c64b2283eb0ece21fb31a1). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1431.626365] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f067b0d-6245-486f-bab0-718c8661fe19 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.638228] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Created folder: Project (9cc81b0f87c64b2283eb0ece21fb31a1) in parent group-v353718. [ 1431.638384] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Creating folder: Instances. Parent ref: group-v353904. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1431.638665] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81c6ab04-1d5a-476d-ae71-9b9d33fb8b4b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.648995] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Created folder: Instances in parent group-v353904. [ 1431.649335] env[63297]: DEBUG oslo.service.loopingcall [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1431.649512] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1431.649760] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bdcb5ca2-a6da-4c21-ae93-bc3cf2fa3204 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.671124] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1431.671124] env[63297]: value = "task-1697538" [ 1431.671124] env[63297]: _type = "Task" [ 1431.671124] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.679657] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697538, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.712452] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.731683] env[63297]: DEBUG nova.compute.manager [req-e09c178a-6fa7-46a4-8d9a-e26386ce24f4 req-ceec675e-6917-4685-b6d0-7f10255d8409 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Received event network-changed-1952432a-7339-4c5d-80fc-5dac66b659e2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1431.731683] env[63297]: DEBUG nova.compute.manager [req-e09c178a-6fa7-46a4-8d9a-e26386ce24f4 req-ceec675e-6917-4685-b6d0-7f10255d8409 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Refreshing instance network info cache due to event network-changed-1952432a-7339-4c5d-80fc-5dac66b659e2. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1431.731683] env[63297]: DEBUG oslo_concurrency.lockutils [req-e09c178a-6fa7-46a4-8d9a-e26386ce24f4 req-ceec675e-6917-4685-b6d0-7f10255d8409 service nova] Acquiring lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.731683] env[63297]: DEBUG oslo_concurrency.lockutils [req-e09c178a-6fa7-46a4-8d9a-e26386ce24f4 req-ceec675e-6917-4685-b6d0-7f10255d8409 service nova] Acquired lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.731824] env[63297]: DEBUG nova.network.neutron [req-e09c178a-6fa7-46a4-8d9a-e26386ce24f4 req-ceec675e-6917-4685-b6d0-7f10255d8409 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Refreshing network info cache for port 1952432a-7339-4c5d-80fc-5dac66b659e2 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1431.940848] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697535, 'name': Rename_Task, 'duration_secs': 0.157262} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.941188] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1431.941719] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f698e1e-a39a-4285-b394-c652a67392bc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.949221] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1431.949221] env[63297]: value = "task-1697539" [ 1431.949221] env[63297]: _type = "Task" [ 1431.949221] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.958614] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697539, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.026818] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4d2aaa2e-37a7-44bf-943a-b81f4085ff8d tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "ef851d71-788d-42f8-a824-5d30a89e957b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.532s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.027845] env[63297]: DEBUG oslo_concurrency.lockutils [req-3f92e960-7e22-4bc8-b75b-8ad3e9d75c5c req-35804d2c-49dc-4635-a186-2b9d50e7af49 service nova] Acquired lock "ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.029021] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e93396-009e-4ad2-8f61-fdd459b9a2be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.039623] env[63297]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 1432.039623] env[63297]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=63297) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1432.039927] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-abf614de-3f39-4a6b-aed1-fbf113e0d728 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.054817] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4232de4-fa2f-46cd-8b2a-21e4a5c9ffdb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.073622] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ea1a6b-5d83-9388-4643-9313f2a550be, 'name': SearchDatastore_Task, 'duration_secs': 0.012196} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.074637] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.074863] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b261c90f-642d-42b7-8b79-d87eeaf0537a/b261c90f-642d-42b7-8b79-d87eeaf0537a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1432.075171] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8fd28042-4e12-4285-be57-e0a2c279f9a3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.085162] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1432.085162] env[63297]: value = "task-1697540" [ 1432.085162] env[63297]: _type = "Task" [ 1432.085162] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.105971] env[63297]: ERROR root [req-3f92e960-7e22-4bc8-b75b-8ad3e9d75c5c req-35804d2c-49dc-4635-a186-2b9d50e7af49 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-353743' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-353743' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-353743' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-353743'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise 
clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-353743' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-353743' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-353743'}\n"]: nova.exception.InstanceNotFound: Instance ef851d71-788d-42f8-a824-5d30a89e957b could not be found. [ 1432.106197] env[63297]: DEBUG oslo_concurrency.lockutils [req-3f92e960-7e22-4bc8-b75b-8ad3e9d75c5c req-35804d2c-49dc-4635-a186-2b9d50e7af49 service nova] Releasing lock "ef851d71-788d-42f8-a824-5d30a89e957b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.106425] env[63297]: DEBUG nova.compute.manager [req-3f92e960-7e22-4bc8-b75b-8ad3e9d75c5c req-35804d2c-49dc-4635-a186-2b9d50e7af49 service nova] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Detach interface failed, port_id=1c481d81-78fe-48f3-9eb8-9180cb78ecdf, reason: Instance ef851d71-788d-42f8-a824-5d30a89e957b could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1432.113254] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697540, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.182810] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697538, 'name': CreateVM_Task, 'duration_secs': 0.491956} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.185318] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1432.186307] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1432.186397] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.186891] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1432.187204] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06096ced-d7cd-4ce6-aa78-d3669bab9dd4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.192898] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1432.192898] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528b4ddd-cda6-5956-c72b-4b417ebbeb37" [ 1432.192898] env[63297]: _type = "Task" [ 1432.192898] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.206230] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528b4ddd-cda6-5956-c72b-4b417ebbeb37, 'name': SearchDatastore_Task, 'duration_secs': 0.009616} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.206477] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.207150] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1432.207150] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1432.207150] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.207337] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1432.207601] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b650568-4518-47a3-99ab-9e47fcb858f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.219518] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1432.219615] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1432.220569] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5afabb6-9962-4578-be7c-7755173c576d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.227466] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1432.227466] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a312b4-c37f-9d6f-5e93-87a240263d9e" [ 1432.227466] env[63297]: _type = "Task" [ 1432.227466] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.238127] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a312b4-c37f-9d6f-5e93-87a240263d9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.469437] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697539, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.555683] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50772cd-9eb6-4549-8134-f90de700270c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.566156] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020fdfaa-dece-4824-bd4d-73b2bf671463 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.571965] env[63297]: DEBUG nova.network.neutron [req-e09c178a-6fa7-46a4-8d9a-e26386ce24f4 req-ceec675e-6917-4685-b6d0-7f10255d8409 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Updated VIF entry in instance network info cache for port 1952432a-7339-4c5d-80fc-5dac66b659e2. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1432.572412] env[63297]: DEBUG nova.network.neutron [req-e09c178a-6fa7-46a4-8d9a-e26386ce24f4 req-ceec675e-6917-4685-b6d0-7f10255d8409 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Updating instance_info_cache with network_info: [{"id": "1952432a-7339-4c5d-80fc-5dac66b659e2", "address": "fa:16:3e:80:2c:fe", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1952432a-73", "ovs_interfaceid": "1952432a-7339-4c5d-80fc-5dac66b659e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.613882] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb76a21-9f29-4666-86bf-bdc9259bb294 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.632212] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90d6710-a786-4988-9a4b-b60e99c9ad99 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.639111] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697540, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515721} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.639669] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b261c90f-642d-42b7-8b79-d87eeaf0537a/b261c90f-642d-42b7-8b79-d87eeaf0537a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1432.639825] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1432.641682] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-387db3e1-10a7-4fe7-98b1-89b06ee9aa00 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.658797] env[63297]: DEBUG nova.compute.provider_tree [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1432.669191] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1432.669191] env[63297]: value = "task-1697541" [ 1432.669191] env[63297]: _type = "Task" [ 1432.669191] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.683968] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697541, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.739406] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a312b4-c37f-9d6f-5e93-87a240263d9e, 'name': SearchDatastore_Task, 'duration_secs': 0.009037} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.740263] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-458322af-c122-47c5-a6ed-f0d66c345c24 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.747857] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1432.747857] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cf146c-0a37-f4e4-6097-06372e31da52" [ 1432.747857] env[63297]: _type = "Task" [ 1432.747857] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.757482] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cf146c-0a37-f4e4-6097-06372e31da52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.961850] env[63297]: DEBUG oslo_vmware.api [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697539, 'name': PowerOnVM_Task, 'duration_secs': 0.742414} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.962075] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1432.962264] env[63297]: INFO nova.compute.manager [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Took 9.05 seconds to spawn the instance on the hypervisor. 
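The task records above (SearchDatastore_Task, CopyVirtualDisk_Task, PowerOnVM_Task, with their "progress is N%" and "completed successfully ... duration_secs" entries) all come out of oslo.vmware's task-polling loop. A minimal sketch of how that pattern is driven, assuming a placeholder vCenter endpoint, credentials, and VM managed-object id (none taken from this log):

```python
# Minimal sketch of the oslo.vmware task-polling pattern behind the
# "Waiting for the task ..." / "progress is N%" / "completed successfully"
# records in this log.  Endpoint, credentials and the VM moref below are
# placeholders, not values from this deployment.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',     # placeholder endpoint/credentials
    api_retry_count=10, task_poll_interval=0.5)   # poll interval paces the progress records

vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')  # placeholder moref

# Task-returning vSphere calls come back immediately with a Task moref ...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# ... and wait_for_task() polls it until it succeeds or raises, producing the
# wait_for_task/_poll_task DEBUG lines seen throughout this section.
session.wait_for_task(task)
```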
[ 1432.962484] env[63297]: DEBUG nova.compute.manager [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1432.963313] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e40b178-016a-49d2-b7c6-c887a9c9a266 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.076106] env[63297]: DEBUG oslo_concurrency.lockutils [req-e09c178a-6fa7-46a4-8d9a-e26386ce24f4 req-ceec675e-6917-4685-b6d0-7f10255d8409 service nova] Releasing lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.076308] env[63297]: DEBUG nova.compute.manager [req-e09c178a-6fa7-46a4-8d9a-e26386ce24f4 req-ceec675e-6917-4685-b6d0-7f10255d8409 service nova] [instance: d2436717-7230-448f-b310-d062b1f11c52] Received event network-vif-deleted-d6cfb890-e1a0-457e-801f-615234386b8f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1433.166177] env[63297]: DEBUG nova.scheduler.client.report [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1433.188517] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697541, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121063} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.188517] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1433.189260] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5091dc9a-d5bc-4d86-8780-446098a44bec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.216398] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] b261c90f-642d-42b7-8b79-d87eeaf0537a/b261c90f-642d-42b7-8b79-d87eeaf0537a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1433.217446] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84a3b9b8-6609-4a56-9384-5876eeb0fc9e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.244028] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1433.244028] env[63297]: value = "task-1697542" [ 1433.244028] env[63297]: _type = "Task" [ 1433.244028] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.258342] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697542, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.265893] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cf146c-0a37-f4e4-6097-06372e31da52, 'name': SearchDatastore_Task, 'duration_secs': 0.014178} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.266412] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.266910] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69/fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1433.269147] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d177aa8-77be-4413-ab6d-5f5df62dfbf4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.280150] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1433.280150] env[63297]: value = "task-1697543" [ 1433.280150] env[63297]: _type = "Task" [ 1433.280150] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.291783] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697543, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.482926] env[63297]: INFO nova.compute.manager [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Took 48.13 seconds to build instance. 
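The "Inventory has not changed for provider ... based on inventory data" records above carry the resource-provider inventory nova reports to placement. As a quick check of what those numbers imply, the usable capacity placement derives from each entry is (total - reserved) * allocation_ratio; a small sketch using the dict copied from the log record:

```python
# Minimal sketch: effective capacity placement derives from the inventory
# logged above for provider 88960333-a089-4255-ad72-5c02d57b2b35,
# computed as (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```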
[ 1433.683397] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.687026] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.987s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.689295] env[63297]: INFO nova.compute.claims [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1433.713728] env[63297]: INFO nova.scheduler.client.report [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Deleted allocations for instance 4e6b1296-9e19-4047-9c38-dc94c686d0cb [ 1433.755184] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697542, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.800977] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697543, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.843948] env[63297]: DEBUG oslo_concurrency.lockutils [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "e7fae121-174f-4955-a185-b3f92c6ab110" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.986674] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4028a61a-b0c1-444e-aabd-3ff31a694737 tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "e7fae121-174f-4955-a185-b3f92c6ab110" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.288s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.987024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "e7fae121-174f-4955-a185-b3f92c6ab110" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.143s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.987265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "e7fae121-174f-4955-a185-b3f92c6ab110-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.987492] env[63297]: DEBUG oslo_concurrency.lockutils [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "e7fae121-174f-4955-a185-b3f92c6ab110-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.987667] env[63297]: DEBUG oslo_concurrency.lockutils [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "e7fae121-174f-4955-a185-b3f92c6ab110-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.989970] env[63297]: INFO nova.compute.manager [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Terminating instance [ 1433.992494] env[63297]: DEBUG nova.compute.manager [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: 
e7fae121-174f-4955-a185-b3f92c6ab110] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1433.992494] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1433.993161] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ebde521-e7b3-40b7-8639-84adf5e4fcb8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.008230] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1434.008230] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76b24add-03e2-4e63-b7f1-826747c1a962 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.018274] env[63297]: DEBUG oslo_vmware.api [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1434.018274] env[63297]: value = "task-1697544" [ 1434.018274] env[63297]: _type = "Task" [ 1434.018274] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.032020] env[63297]: DEBUG oslo_vmware.api [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.224079] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c067c8e8-897a-4278-b3a1-1951b40bdaa7 tempest-ServersTestBootFromVolume-1738108197 tempest-ServersTestBootFromVolume-1738108197-project-member] Lock "4e6b1296-9e19-4047-9c38-dc94c686d0cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.618s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.254908] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697542, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.297460] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697543, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.86745} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.299030] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69/fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1434.299030] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1434.299030] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e30ca2df-b68d-4a28-abf7-88ed812f7590 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.313025] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1434.313025] env[63297]: value = "task-1697545" [ 1434.313025] env[63297]: _type = "Task" [ 1434.313025] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.329398] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697545, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.532495] env[63297]: DEBUG oslo_vmware.api [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697544, 'name': PowerOffVM_Task, 'duration_secs': 0.371347} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.532859] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1434.533145] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1434.533457] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51e2ad8f-1df1-429a-8ca7-c07c94a74555 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.621234] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1434.621234] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1434.621329] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Deleting the datastore file [datastore1] e7fae121-174f-4955-a185-b3f92c6ab110 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1434.624550] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3385f0ad-9ec3-4d5f-9278-9024ce2d0e81 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.631721] env[63297]: DEBUG oslo_vmware.api [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for the task: (returnval){ [ 1434.631721] env[63297]: value = "task-1697547" [ 1434.631721] env[63297]: _type = "Task" [ 1434.631721] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.643680] env[63297]: DEBUG oslo_vmware.api [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697547, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.754772] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697542, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.827556] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697545, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128113} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.827874] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1434.828669] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05cfd19-7d67-49aa-a2f5-ae8cf535a247 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.854655] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69/fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1434.857785] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-298ceab5-ec50-4688-836f-df9cd3b587b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.879599] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1434.879599] env[63297]: value = "task-1697548" [ 1434.879599] env[63297]: _type = "Task" [ 1434.879599] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.892225] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697548, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.142497] env[63297]: DEBUG oslo_vmware.api [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Task: {'id': task-1697547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.379349} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.142688] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1435.142862] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1435.143058] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1435.143234] env[63297]: INFO nova.compute.manager [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1435.143465] env[63297]: DEBUG oslo.service.loopingcall [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1435.146239] env[63297]: DEBUG nova.compute.manager [-] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1435.146341] env[63297]: DEBUG nova.network.neutron [-] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1435.240398] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b749003-7c47-46ea-a403-b8885b9adc2c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.251698] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd50fa66-1b86-4d5a-933f-3461d009ee15 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.257855] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697542, 'name': ReconfigVM_Task, 'duration_secs': 1.650334} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.258522] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Reconfigured VM instance instance-0000003e to attach disk [datastore1] b261c90f-642d-42b7-8b79-d87eeaf0537a/b261c90f-642d-42b7-8b79-d87eeaf0537a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1435.258863] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=63297) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1435.259499] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-6c9d5a59-a1f8-4914-b0d5-fb9af78fae55 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.291265] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898f7d01-662c-406d-a20a-54a32d5233ef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.298641] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.298861] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.300268] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1435.300268] env[63297]: value = "task-1697549" [ 1435.300268] env[63297]: _type = "Task" [ 1435.300268] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.309757] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33445b4f-374b-4e5f-9d6b-3c9f5fec5cb6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.317567] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697549, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.328911] env[63297]: DEBUG nova.compute.provider_tree [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1435.390510] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697548, 'name': ReconfigVM_Task, 'duration_secs': 0.361649} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.390814] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Reconfigured VM instance instance-0000003f to attach disk [datastore1] fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69/fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1435.391464] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f91a30ee-6968-48d6-959d-fb4669dbc73f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.402843] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1435.402843] env[63297]: value = "task-1697550" [ 1435.402843] env[63297]: _type = "Task" [ 1435.402843] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.412264] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697550, 'name': Rename_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.524130] env[63297]: DEBUG nova.compute.manager [req-421f3190-64f1-410e-80da-a7c60947e6bf req-73d88603-0303-411d-8b8d-ba9bd05ee034 service nova] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Received event network-vif-deleted-902a1c73-2ddc-4fab-8f16-538934cbd123 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1435.524413] env[63297]: INFO nova.compute.manager [req-421f3190-64f1-410e-80da-a7c60947e6bf req-73d88603-0303-411d-8b8d-ba9bd05ee034 service nova] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Neutron deleted interface 902a1c73-2ddc-4fab-8f16-538934cbd123; detaching it from the instance and deleting it from the info cache [ 1435.524628] env[63297]: DEBUG nova.network.neutron [req-421f3190-64f1-410e-80da-a7c60947e6bf req-73d88603-0303-411d-8b8d-ba9bd05ee034 service nova] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.801333] env[63297]: DEBUG nova.compute.manager [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1435.813565] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697549, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.054933} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.813843] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=63297) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1435.814715] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ea7e5b-97dc-483d-b867-e469a0ab4014 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.833134] env[63297]: DEBUG nova.scheduler.client.report [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1435.846330] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] b261c90f-642d-42b7-8b79-d87eeaf0537a/ephemeral_0.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1435.846945] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b4c7e0a-4a30-4a8f-8607-30effcd0cac6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.870793] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1435.870793] env[63297]: value = "task-1697551" [ 1435.870793] env[63297]: _type = "Task" [ 1435.870793] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.879781] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697551, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.913531] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697550, 'name': Rename_Task, 'duration_secs': 0.169757} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.913857] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1435.914138] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-523bb31d-fe64-4982-ba90-ae41239f50b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.922116] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1435.922116] env[63297]: value = "task-1697552" [ 1435.922116] env[63297]: _type = "Task" [ 1435.922116] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.927011] env[63297]: DEBUG nova.network.neutron [-] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.931828] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697552, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.028260] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9958049-4c1e-4c2b-b7a7-3c1e318e03a2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.039288] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4b42e7-1669-4a50-acf0-9b2754be6fa0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.080564] env[63297]: DEBUG nova.compute.manager [req-421f3190-64f1-410e-80da-a7c60947e6bf req-73d88603-0303-411d-8b8d-ba9bd05ee034 service nova] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Detach interface failed, port_id=902a1c73-2ddc-4fab-8f16-538934cbd123, reason: Instance e7fae121-174f-4955-a185-b3f92c6ab110 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1436.348144] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.352311] env[63297]: DEBUG nova.compute.manager [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1436.355258] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.347s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.355258] env[63297]: DEBUG nova.objects.instance [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lazy-loading 'resources' on Instance uuid 5e158880-81a6-4d35-b1df-6fd59ba4a8ff {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1436.365829] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.384419] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697551, 'name': ReconfigVM_Task, 'duration_secs': 0.424628} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.385051] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Reconfigured VM instance instance-0000003e to attach disk [datastore1] b261c90f-642d-42b7-8b79-d87eeaf0537a/ephemeral_0.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1436.385751] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0b3115c-aebe-45b8-a85b-946a93e50513 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.394738] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1436.394738] env[63297]: value = "task-1697553" [ 1436.394738] env[63297]: _type = "Task" [ 1436.394738] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.405330] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697553, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.437529] env[63297]: INFO nova.compute.manager [-] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Took 1.29 seconds to deallocate network for instance. 
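The entries above trace the vCenter task lifecycle Nova drives through oslo.vmware: a ReconfigVM_Task (or Rename_Task / PowerOnVM_Task) is invoked, the API layer logs "Waiting for the task ... to complete", polls progress ("progress is 6%"), and finally reports "completed successfully" with a duration. The following is only a minimal sketch of that poll-until-done pattern; get_task_info and TaskTimedOut are hypothetical placeholders, not the oslo.vmware implementation.

    import time

    class TaskTimedOut(Exception):
        pass

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5, timeout=300):
        # get_task_info is a hypothetical callable returning an object with
        # .state ('running' | 'success' | 'error'), .progress and .error,
        # standing in for the PropertyCollector reads seen in the log.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info                      # "completed successfully"
            if info.state == 'error':
                raise RuntimeError(info.error)   # task failed on the vCenter side
            # equivalent of the "progress is N%" debug lines
            print(f"Task {task_ref}: progress is {info.progress}%")
            time.sleep(poll_interval)
        raise TaskTimedOut(task_ref)
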
[ 1436.439347] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697552, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.864895] env[63297]: DEBUG nova.compute.utils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1436.869360] env[63297]: DEBUG nova.compute.manager [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1436.869558] env[63297]: DEBUG nova.network.neutron [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1436.905884] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697553, 'name': Rename_Task, 'duration_secs': 0.326418} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.906554] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1436.906834] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a40327a7-88aa-45c7-850a-dbfff7b4e76a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.914787] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1436.914787] env[63297]: value = "task-1697554" [ 1436.914787] env[63297]: _type = "Task" [ 1436.914787] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.923313] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697554, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.936674] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697552, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.948843] env[63297]: DEBUG oslo_concurrency.lockutils [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.147772] env[63297]: DEBUG nova.policy [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20a91144677b4efba8ab91acd53d1c04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c33733e0599840618625ecb3e6bb6029', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1437.358027] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfadaf6-07d7-45cc-8e76-212ad01c60df {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.366830] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444a5365-a2e9-4015-a598-8cd7d87671cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.372587] env[63297]: DEBUG nova.compute.manager [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1437.408537] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838d586f-9417-41d9-a43d-45b7dcb25c38 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.414252] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5799c53e-7a40-49c0-86e5-36f1a8926de8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.429365] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697554, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.438348] env[63297]: DEBUG nova.compute.provider_tree [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1437.449082] env[63297]: DEBUG oslo_vmware.api [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697552, 'name': PowerOnVM_Task, 'duration_secs': 1.252971} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.449354] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1437.449744] env[63297]: INFO nova.compute.manager [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Took 8.15 seconds to spawn the instance on the hypervisor. [ 1437.449744] env[63297]: DEBUG nova.compute.manager [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1437.450510] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69712518-08e6-4a48-a00f-76363f3f14b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.525293] env[63297]: DEBUG nova.network.neutron [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Successfully created port: 8b35ba63-0efc-493d-a8be-caa8cd5b2e21 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1437.934685] env[63297]: DEBUG oslo_vmware.api [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697554, 'name': PowerOnVM_Task, 'duration_secs': 0.910871} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.934983] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1437.935175] env[63297]: INFO nova.compute.manager [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Took 10.87 seconds to spawn the instance on the hypervisor. [ 1437.935320] env[63297]: DEBUG nova.compute.manager [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1437.936240] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfd8505-7211-4677-8b0c-b0e3174edd31 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.943882] env[63297]: DEBUG nova.scheduler.client.report [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1437.976175] env[63297]: INFO nova.compute.manager [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Took 50.49 seconds to build instance. [ 1438.382464] env[63297]: DEBUG nova.compute.manager [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1438.413292] env[63297]: DEBUG nova.virt.hardware [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1438.413512] env[63297]: DEBUG nova.virt.hardware [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1438.413663] env[63297]: DEBUG nova.virt.hardware [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1438.413836] env[63297]: DEBUG nova.virt.hardware [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1438.413973] env[63297]: DEBUG nova.virt.hardware [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1438.414128] env[63297]: DEBUG nova.virt.hardware [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1438.415203] env[63297]: DEBUG nova.virt.hardware [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1438.415203] env[63297]: DEBUG nova.virt.hardware [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1438.415203] env[63297]: DEBUG nova.virt.hardware [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 
tempest-ServersTestJSON-1972465365-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1438.415203] env[63297]: DEBUG nova.virt.hardware [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1438.415203] env[63297]: DEBUG nova.virt.hardware [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1438.415859] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8e5262-1dab-4ab1-8b61-91e12f6ec5ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.426084] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79702089-8c54-495e-8b2b-bb8bc62480cd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.451087] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.096s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.457469] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.313s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.458964] env[63297]: INFO nova.compute.claims [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1438.463737] env[63297]: INFO nova.compute.manager [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Took 52.97 seconds to build instance. 
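The repeated "Inventory has not changed for provider 88960333-..." entries carry the inventory Nova reports to Placement per resource class (total, reserved, allocation_ratio, unit bounds). Purely as an illustration, and assuming the usual Placement-style capacity formula capacity = (total - reserved) * allocation_ratio, the figures logged above work out as follows; the values are copied from the log, the formula is an assumption of this sketch.

    # Illustrative only: inventory values copied from the log entries above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Assumed Placement-style formula: (total - reserved) * allocation_ratio.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, capacity(inv))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
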
[ 1438.477448] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10a9d83-2bde-4da3-aed4-0433b4a39ce5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.492s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.483127] env[63297]: INFO nova.scheduler.client.report [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Deleted allocations for instance 5e158880-81a6-4d35-b1df-6fd59ba4a8ff [ 1438.968028] env[63297]: DEBUG oslo_concurrency.lockutils [None req-862c10dd-ba29-4f5e-af8d-b01d2c7ba7aa tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "b261c90f-642d-42b7-8b79-d87eeaf0537a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.743s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.992361] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5b3c7e7a-cacc-46f7-898f-549b30b48c08 tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "5e158880-81a6-4d35-b1df-6fd59ba4a8ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.909s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.145393] env[63297]: DEBUG nova.compute.manager [req-2ff703c0-c48b-44ba-899b-ce7059a1a5f0 req-24c66e43-6699-4ec7-abcc-56947313ef59 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Received event network-changed-1952432a-7339-4c5d-80fc-5dac66b659e2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1439.147708] env[63297]: DEBUG nova.compute.manager [req-2ff703c0-c48b-44ba-899b-ce7059a1a5f0 req-24c66e43-6699-4ec7-abcc-56947313ef59 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Refreshing instance network info cache due to event network-changed-1952432a-7339-4c5d-80fc-5dac66b659e2. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1439.148472] env[63297]: DEBUG oslo_concurrency.lockutils [req-2ff703c0-c48b-44ba-899b-ce7059a1a5f0 req-24c66e43-6699-4ec7-abcc-56947313ef59 service nova] Acquiring lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.149837] env[63297]: DEBUG oslo_concurrency.lockutils [req-2ff703c0-c48b-44ba-899b-ce7059a1a5f0 req-24c66e43-6699-4ec7-abcc-56947313ef59 service nova] Acquired lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.149837] env[63297]: DEBUG nova.network.neutron [req-2ff703c0-c48b-44ba-899b-ce7059a1a5f0 req-24c66e43-6699-4ec7-abcc-56947313ef59 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Refreshing network info cache for port 1952432a-7339-4c5d-80fc-5dac66b659e2 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1439.325049] env[63297]: DEBUG nova.compute.manager [req-a914faef-3939-41cb-87ad-372172681560 req-e2b7c595-5a97-458b-82dc-db6614c30b8a service nova] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Received event network-vif-plugged-8b35ba63-0efc-493d-a8be-caa8cd5b2e21 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1439.325290] env[63297]: DEBUG oslo_concurrency.lockutils [req-a914faef-3939-41cb-87ad-372172681560 req-e2b7c595-5a97-458b-82dc-db6614c30b8a service nova] Acquiring lock "708d1907-1619-4aa4-b0b3-ae58f046a760-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1439.325541] env[63297]: DEBUG oslo_concurrency.lockutils [req-a914faef-3939-41cb-87ad-372172681560 req-e2b7c595-5a97-458b-82dc-db6614c30b8a service nova] Lock "708d1907-1619-4aa4-b0b3-ae58f046a760-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.325727] env[63297]: DEBUG oslo_concurrency.lockutils [req-a914faef-3939-41cb-87ad-372172681560 req-e2b7c595-5a97-458b-82dc-db6614c30b8a service nova] Lock "708d1907-1619-4aa4-b0b3-ae58f046a760-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.325930] env[63297]: DEBUG nova.compute.manager [req-a914faef-3939-41cb-87ad-372172681560 req-e2b7c595-5a97-458b-82dc-db6614c30b8a service nova] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] No waiting events found dispatching network-vif-plugged-8b35ba63-0efc-493d-a8be-caa8cd5b2e21 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1439.326110] env[63297]: WARNING nova.compute.manager [req-a914faef-3939-41cb-87ad-372172681560 req-e2b7c595-5a97-458b-82dc-db6614c30b8a service nova] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Received unexpected event network-vif-plugged-8b35ba63-0efc-493d-a8be-caa8cd5b2e21 for instance with vm_state building and task_state spawning. 
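The req-a914faef-... entries record the external-event path: when Neutron reports network-vif-plugged, the compute manager takes the per-instance "<uuid>-events" lock, pops a matching waiter if one was registered, and otherwise emits the "No waiting events found dispatching ..." / "Received unexpected event ..." lines seen above. A rough sketch of that register/pop pattern follows, using hypothetical names and none of Nova's actual plumbing.

    import threading
    from collections import defaultdict

    class InstanceEvents:
        # Hypothetical stand-in for the per-instance event registry the log refers to.
        def __init__(self):
            self._lock = threading.Lock()        # plays the role of the "<uuid>-events" lock
            self._waiters = defaultdict(dict)    # instance_uuid -> {event_name: threading.Event}

        def prepare_for_event(self, instance_uuid, event_name):
            with self._lock:
                ev = threading.Event()
                self._waiters[instance_uuid][event_name] = ev
                return ev

        def pop_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters[instance_uuid].pop(event_name, None)

    def handle_external_event(events, instance_uuid, event_name):
        waiter = events.pop_event(instance_uuid, event_name)
        if waiter is None:
            # corresponds to "No waiting events found dispatching ..." /
            # "Received unexpected event ..." in the log
            print(f"unexpected event {event_name} for {instance_uuid}")
        else:
            waiter.set()   # unblock whoever is waiting for network-vif-plugged
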
[ 1439.513572] env[63297]: DEBUG nova.network.neutron [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Successfully updated port: 8b35ba63-0efc-493d-a8be-caa8cd5b2e21 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1440.018968] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "refresh_cache-708d1907-1619-4aa4-b0b3-ae58f046a760" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.018968] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "refresh_cache-708d1907-1619-4aa4-b0b3-ae58f046a760" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.018968] env[63297]: DEBUG nova.network.neutron [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1440.020377] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ee32df-06cc-47ff-846d-37143e6f5a0a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.029093] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f812eaf3-6e39-4b1c-9d04-0184ff64e8c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.039244] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.039244] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.039244] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.039244] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 
tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.039244] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.040483] env[63297]: INFO nova.compute.manager [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Terminating instance [ 1440.042228] env[63297]: DEBUG nova.compute.manager [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1440.042416] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1440.043433] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c56ad49-0252-42b5-93cb-a4f451511d62 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.073970] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67eac6ba-bda6-4eb6-a17e-d81034f4435c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.077718] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1440.078282] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-244f3500-6a72-4f47-b36c-275ceb7f82b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.084059] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9405fa9e-b6c8-4c4b-868d-e417ad22a7c9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.091191] env[63297]: DEBUG oslo_vmware.api [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1440.091191] env[63297]: value = "task-1697555" [ 
1440.091191] env[63297]: _type = "Task" [ 1440.091191] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.103663] env[63297]: DEBUG nova.compute.provider_tree [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1440.113948] env[63297]: DEBUG oslo_vmware.api [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697555, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.181133] env[63297]: DEBUG nova.network.neutron [req-2ff703c0-c48b-44ba-899b-ce7059a1a5f0 req-24c66e43-6699-4ec7-abcc-56947313ef59 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Updated VIF entry in instance network info cache for port 1952432a-7339-4c5d-80fc-5dac66b659e2. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1440.181708] env[63297]: DEBUG nova.network.neutron [req-2ff703c0-c48b-44ba-899b-ce7059a1a5f0 req-24c66e43-6699-4ec7-abcc-56947313ef59 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Updating instance_info_cache with network_info: [{"id": "1952432a-7339-4c5d-80fc-5dac66b659e2", "address": "fa:16:3e:80:2c:fe", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1952432a-73", "ovs_interfaceid": "1952432a-7339-4c5d-80fc-5dac66b659e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.558873] env[63297]: DEBUG nova.network.neutron [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1440.600730] env[63297]: DEBUG oslo_vmware.api [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697555, 'name': PowerOffVM_Task, 'duration_secs': 0.322169} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.601015] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1440.601312] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1440.601600] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e4c319c-d853-4b2d-a86a-14032269c526 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.611252] env[63297]: DEBUG nova.scheduler.client.report [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1440.686053] env[63297]: DEBUG oslo_concurrency.lockutils [req-2ff703c0-c48b-44ba-899b-ce7059a1a5f0 req-24c66e43-6699-4ec7-abcc-56947313ef59 service nova] Releasing lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.760078] env[63297]: DEBUG nova.network.neutron [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Updating instance_info_cache with network_info: [{"id": "8b35ba63-0efc-493d-a8be-caa8cd5b2e21", "address": "fa:16:3e:b9:1a:cd", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b35ba63-0e", "ovs_interfaceid": "8b35ba63-0efc-493d-a8be-caa8cd5b2e21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.794165] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1440.796223] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1440.796223] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Deleting the datastore file [datastore1] f429dd9b-be6c-4e90-876b-3a3931fb1c4a {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1440.796223] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46a29e99-8d5f-4d9c-bb8e-f0c7f50b4b83 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.807125] env[63297]: DEBUG oslo_vmware.api [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for the task: (returnval){ [ 1440.807125] env[63297]: value = "task-1697557" [ 1440.807125] env[63297]: _type = "Task" [ 1440.807125] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.812263] env[63297]: DEBUG oslo_vmware.api [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697557, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.117604] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.118832] env[63297]: DEBUG nova.compute.manager [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1441.122760] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.551s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.124289] env[63297]: INFO nova.compute.claims [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1441.255155] env[63297]: DEBUG nova.compute.manager [req-7e077456-2bed-455a-9b92-d08e4c5fbff4 req-feac34b8-37a6-46ab-bfa2-86f0f1468538 service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Received event network-changed-bc6f8a91-4b3e-4532-be2b-27a1ebd119aa {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1441.255403] env[63297]: DEBUG nova.compute.manager [req-7e077456-2bed-455a-9b92-d08e4c5fbff4 req-feac34b8-37a6-46ab-bfa2-86f0f1468538 service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Refreshing instance network info cache due to event network-changed-bc6f8a91-4b3e-4532-be2b-27a1ebd119aa. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1441.255689] env[63297]: DEBUG oslo_concurrency.lockutils [req-7e077456-2bed-455a-9b92-d08e4c5fbff4 req-feac34b8-37a6-46ab-bfa2-86f0f1468538 service nova] Acquiring lock "refresh_cache-b261c90f-642d-42b7-8b79-d87eeaf0537a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1441.255862] env[63297]: DEBUG oslo_concurrency.lockutils [req-7e077456-2bed-455a-9b92-d08e4c5fbff4 req-feac34b8-37a6-46ab-bfa2-86f0f1468538 service nova] Acquired lock "refresh_cache-b261c90f-642d-42b7-8b79-d87eeaf0537a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.258039] env[63297]: DEBUG nova.network.neutron [req-7e077456-2bed-455a-9b92-d08e4c5fbff4 req-feac34b8-37a6-46ab-bfa2-86f0f1468538 service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Refreshing network info cache for port bc6f8a91-4b3e-4532-be2b-27a1ebd119aa {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1441.263687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "refresh_cache-708d1907-1619-4aa4-b0b3-ae58f046a760" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1441.263965] env[63297]: DEBUG nova.compute.manager [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Instance network_info: |[{"id": "8b35ba63-0efc-493d-a8be-caa8cd5b2e21", "address": "fa:16:3e:b9:1a:cd", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b35ba63-0e", "ovs_interfaceid": "8b35ba63-0efc-493d-a8be-caa8cd5b2e21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1441.264487] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:1a:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b35ba63-0efc-493d-a8be-caa8cd5b2e21', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1441.272872] env[63297]: DEBUG oslo.service.loopingcall [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1441.273638] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1441.274233] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a2f009b-79c0-4f1c-a06c-b4ff32c44a17 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.298182] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1441.298182] env[63297]: value = "task-1697558" [ 1441.298182] env[63297]: _type = "Task" [ 1441.298182] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.307769] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697558, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.317171] env[63297]: DEBUG oslo_vmware.api [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Task: {'id': task-1697557, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141739} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.317564] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1441.317634] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1441.317817] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1441.317990] env[63297]: INFO nova.compute.manager [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1441.318248] env[63297]: DEBUG oslo.service.loopingcall [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1441.318775] env[63297]: DEBUG nova.compute.manager [-] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1441.318875] env[63297]: DEBUG nova.network.neutron [-] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1441.500744] env[63297]: DEBUG nova.compute.manager [req-bfa3e2ba-213f-494f-a151-52360c40d67a req-fc8e2f53-1da4-4dc2-8079-d28b829ab8d4 service nova] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Received event network-changed-8b35ba63-0efc-493d-a8be-caa8cd5b2e21 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1441.501012] env[63297]: DEBUG nova.compute.manager [req-bfa3e2ba-213f-494f-a151-52360c40d67a req-fc8e2f53-1da4-4dc2-8079-d28b829ab8d4 service nova] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Refreshing instance network info cache due to event network-changed-8b35ba63-0efc-493d-a8be-caa8cd5b2e21. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1441.501244] env[63297]: DEBUG oslo_concurrency.lockutils [req-bfa3e2ba-213f-494f-a151-52360c40d67a req-fc8e2f53-1da4-4dc2-8079-d28b829ab8d4 service nova] Acquiring lock "refresh_cache-708d1907-1619-4aa4-b0b3-ae58f046a760" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1441.501393] env[63297]: DEBUG oslo_concurrency.lockutils [req-bfa3e2ba-213f-494f-a151-52360c40d67a req-fc8e2f53-1da4-4dc2-8079-d28b829ab8d4 service nova] Acquired lock "refresh_cache-708d1907-1619-4aa4-b0b3-ae58f046a760" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.501547] env[63297]: DEBUG nova.network.neutron [req-bfa3e2ba-213f-494f-a151-52360c40d67a req-fc8e2f53-1da4-4dc2-8079-d28b829ab8d4 service nova] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Refreshing network info cache for port 8b35ba63-0efc-493d-a8be-caa8cd5b2e21 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1441.631043] env[63297]: DEBUG nova.compute.utils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1441.635320] env[63297]: DEBUG nova.compute.manager [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1441.635320] env[63297]: DEBUG nova.network.neutron [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1441.718628] env[63297]: DEBUG nova.policy [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e10106c41d7b493e8389f80ab08d648e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29c5c1c260474315a1a34b83a8054983', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1441.810033] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697558, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.097162] env[63297]: DEBUG nova.network.neutron [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Successfully created port: 3e1fce64-30c4-4df1-b40f-3c1c3c717df4 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1442.136616] env[63297]: DEBUG nova.compute.manager [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1442.188782] env[63297]: DEBUG nova.network.neutron [-] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.316385] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697558, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.401804] env[63297]: DEBUG nova.network.neutron [req-7e077456-2bed-455a-9b92-d08e4c5fbff4 req-feac34b8-37a6-46ab-bfa2-86f0f1468538 service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Updated VIF entry in instance network info cache for port bc6f8a91-4b3e-4532-be2b-27a1ebd119aa. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1442.402233] env[63297]: DEBUG nova.network.neutron [req-7e077456-2bed-455a-9b92-d08e4c5fbff4 req-feac34b8-37a6-46ab-bfa2-86f0f1468538 service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Updating instance_info_cache with network_info: [{"id": "bc6f8a91-4b3e-4532-be2b-27a1ebd119aa", "address": "fa:16:3e:1c:79:55", "network": {"id": "bfd15893-9bb1-46a3-bf31-db474ed0269a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1591634149-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734a95312d7d4da38201457d4f542a9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc6f8a91-4b", "ovs_interfaceid": "bc6f8a91-4b3e-4532-be2b-27a1ebd119aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.571350] env[63297]: DEBUG nova.network.neutron [req-bfa3e2ba-213f-494f-a151-52360c40d67a req-fc8e2f53-1da4-4dc2-8079-d28b829ab8d4 service nova] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Updated VIF entry in 
instance network info cache for port 8b35ba63-0efc-493d-a8be-caa8cd5b2e21. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1442.571702] env[63297]: DEBUG nova.network.neutron [req-bfa3e2ba-213f-494f-a151-52360c40d67a req-fc8e2f53-1da4-4dc2-8079-d28b829ab8d4 service nova] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Updating instance_info_cache with network_info: [{"id": "8b35ba63-0efc-493d-a8be-caa8cd5b2e21", "address": "fa:16:3e:b9:1a:cd", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b35ba63-0e", "ovs_interfaceid": "8b35ba63-0efc-493d-a8be-caa8cd5b2e21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.668030] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85207b8d-39c7-43a2-9a95-4b15fbfc240e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.678966] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59afae4-dae5-4790-af6f-5f30afdec69e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.712292] env[63297]: INFO nova.compute.manager [-] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Took 1.39 seconds to deallocate network for instance. [ 1442.715660] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b59f95-4c03-43e6-9c6b-a696d3d4e248 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.731218] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556fd82d-56e7-44c0-8b0a-a1ffeafafd52 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.747939] env[63297]: DEBUG nova.compute.provider_tree [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1442.810455] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697558, 'name': CreateVM_Task, 'duration_secs': 1.456631} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.810630] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1442.811308] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.811469] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.811794] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1442.812056] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9f9d260-a69b-44fa-bcf1-c49468209e26 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.817197] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1442.817197] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ee69a0-57c7-2454-b1ee-45fce031fd75" [ 1442.817197] env[63297]: _type = "Task" [ 1442.817197] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.827303] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ee69a0-57c7-2454-b1ee-45fce031fd75, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.906657] env[63297]: DEBUG oslo_concurrency.lockutils [req-7e077456-2bed-455a-9b92-d08e4c5fbff4 req-feac34b8-37a6-46ab-bfa2-86f0f1468538 service nova] Releasing lock "refresh_cache-b261c90f-642d-42b7-8b79-d87eeaf0537a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.075765] env[63297]: DEBUG oslo_concurrency.lockutils [req-bfa3e2ba-213f-494f-a151-52360c40d67a req-fc8e2f53-1da4-4dc2-8079-d28b829ab8d4 service nova] Releasing lock "refresh_cache-708d1907-1619-4aa4-b0b3-ae58f046a760" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.153822] env[63297]: DEBUG nova.compute.manager [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1443.181278] env[63297]: DEBUG nova.virt.hardware [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1443.181516] env[63297]: DEBUG nova.virt.hardware [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1443.181678] env[63297]: DEBUG nova.virt.hardware [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1443.181861] env[63297]: DEBUG nova.virt.hardware [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1443.182013] env[63297]: DEBUG nova.virt.hardware [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1443.182170] env[63297]: DEBUG nova.virt.hardware [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 
tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1443.182377] env[63297]: DEBUG nova.virt.hardware [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1443.182561] env[63297]: DEBUG nova.virt.hardware [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1443.182736] env[63297]: DEBUG nova.virt.hardware [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1443.182898] env[63297]: DEBUG nova.virt.hardware [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1443.183082] env[63297]: DEBUG nova.virt.hardware [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1443.183653] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Acquiring lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.183866] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.185450] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3697457-3e8f-4584-b88e-a20bfba3dff5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.195339] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe1139c-a490-4ee6-959e-dfabd0c48157 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.223582] 
env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.251543] env[63297]: DEBUG nova.scheduler.client.report [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1443.328762] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ee69a0-57c7-2454-b1ee-45fce031fd75, 'name': SearchDatastore_Task, 'duration_secs': 0.009156} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.329091] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.329330] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1443.329592] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1443.329705] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.329879] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1443.330174] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c62e896-c212-4355-b1f2-56e4021ce917 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.339775] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1443.339956] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1443.340674] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69a69c75-fb13-4349-b283-cc9c2e643a6e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.345886] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1443.345886] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f8f122-b6dc-0025-e3ce-b3e4c9e5b7ea" [ 1443.345886] env[63297]: _type = "Task" [ 1443.345886] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.353696] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f8f122-b6dc-0025-e3ce-b3e4c9e5b7ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.588495] env[63297]: DEBUG nova.compute.manager [req-08baadbd-0f41-4d2c-85eb-8ecd352ce88c req-d35ec3b3-f7c9-40ba-aebe-9d0a0039f161 service nova] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Received event network-vif-deleted-80a2f749-1281-4f8d-853e-5d5bca529aa3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1443.689640] env[63297]: DEBUG nova.compute.manager [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1443.756917] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.635s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.757449] env[63297]: DEBUG nova.compute.manager [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1443.759966] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.945s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.760211] env[63297]: DEBUG nova.objects.instance [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lazy-loading 'resources' on Instance uuid b65e8c04-df55-491e-861c-8aa6def8c9be {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1443.768453] env[63297]: DEBUG nova.compute.manager [req-83518380-5013-465b-9f8b-d2e3c245f240 req-2283c375-6bc2-446b-8fc0-07e012e9da53 service nova] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Received event network-vif-plugged-3e1fce64-30c4-4df1-b40f-3c1c3c717df4 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1443.768453] env[63297]: DEBUG oslo_concurrency.lockutils [req-83518380-5013-465b-9f8b-d2e3c245f240 req-2283c375-6bc2-446b-8fc0-07e012e9da53 service nova] Acquiring lock "89c9cd40-585e-4ae6-88b3-1a33a94c3b52-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.768453] env[63297]: DEBUG oslo_concurrency.lockutils [req-83518380-5013-465b-9f8b-d2e3c245f240 req-2283c375-6bc2-446b-8fc0-07e012e9da53 service nova] Lock "89c9cd40-585e-4ae6-88b3-1a33a94c3b52-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.768453] env[63297]: DEBUG oslo_concurrency.lockutils [req-83518380-5013-465b-9f8b-d2e3c245f240 req-2283c375-6bc2-446b-8fc0-07e012e9da53 service nova] Lock "89c9cd40-585e-4ae6-88b3-1a33a94c3b52-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.768453] env[63297]: DEBUG nova.compute.manager [req-83518380-5013-465b-9f8b-d2e3c245f240 req-2283c375-6bc2-446b-8fc0-07e012e9da53 service nova] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] No waiting events found dispatching 
network-vif-plugged-3e1fce64-30c4-4df1-b40f-3c1c3c717df4 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1443.768453] env[63297]: WARNING nova.compute.manager [req-83518380-5013-465b-9f8b-d2e3c245f240 req-2283c375-6bc2-446b-8fc0-07e012e9da53 service nova] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Received unexpected event network-vif-plugged-3e1fce64-30c4-4df1-b40f-3c1c3c717df4 for instance with vm_state building and task_state spawning. [ 1443.825704] env[63297]: DEBUG nova.network.neutron [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Successfully updated port: 3e1fce64-30c4-4df1-b40f-3c1c3c717df4 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1443.856851] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f8f122-b6dc-0025-e3ce-b3e4c9e5b7ea, 'name': SearchDatastore_Task, 'duration_secs': 0.008356} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.857753] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-068f5df4-1962-4447-9b20-b28824adcc9b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.863773] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1443.863773] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]525edc25-ca6a-b9d6-f537-73acc6313311" [ 1443.863773] env[63297]: _type = "Task" [ 1443.863773] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.871457] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525edc25-ca6a-b9d6-f537-73acc6313311, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.216397] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.268028] env[63297]: DEBUG nova.compute.utils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1444.270021] env[63297]: DEBUG nova.compute.manager [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1444.270021] env[63297]: DEBUG nova.network.neutron [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1444.328510] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "refresh_cache-89c9cd40-585e-4ae6-88b3-1a33a94c3b52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1444.328674] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired lock "refresh_cache-89c9cd40-585e-4ae6-88b3-1a33a94c3b52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.328828] env[63297]: DEBUG nova.network.neutron [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1444.374612] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525edc25-ca6a-b9d6-f537-73acc6313311, 'name': SearchDatastore_Task, 'duration_secs': 0.009152} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.374923] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.375194] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 708d1907-1619-4aa4-b0b3-ae58f046a760/708d1907-1619-4aa4-b0b3-ae58f046a760.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1444.375450] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5cf55ad-0b1f-4ba7-a433-ca9f822679c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.383815] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1444.383815] env[63297]: value = "task-1697559" [ 1444.383815] env[63297]: _type = "Task" [ 1444.383815] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.391226] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697559, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.491638] env[63297]: DEBUG nova.policy [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b3ec3a9fc5544ed864d57a099684bbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2deadaf96df7430aba8594c7f98facd2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1444.776180] env[63297]: DEBUG nova.compute.manager [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1444.783877] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1850bafb-410d-42aa-8639-7cfa80151026 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.796644] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8ceb18-0bb3-4dda-90cd-14cd5b035964 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.846569] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78dece5c-78ee-4d10-aa46-f10f5b4e064e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.857457] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0d1d9b-cab1-4bb8-b746-18bd31e6785f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.876215] env[63297]: DEBUG nova.compute.provider_tree [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1444.880663] env[63297]: DEBUG nova.network.neutron [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1444.893712] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697559, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506309} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.894082] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 708d1907-1619-4aa4-b0b3-ae58f046a760/708d1907-1619-4aa4-b0b3-ae58f046a760.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1444.894411] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1444.894778] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2eaf2dc2-89ae-4317-b051-97de8c9d3abb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.902970] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1444.902970] env[63297]: value = "task-1697560" [ 1444.902970] env[63297]: _type = "Task" [ 1444.902970] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.910878] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697560, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.941786] env[63297]: DEBUG nova.network.neutron [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Successfully created port: f15b3b92-ed83-4487-b142-801fa9f72581 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1445.031644] env[63297]: DEBUG nova.network.neutron [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Updating instance_info_cache with network_info: [{"id": "3e1fce64-30c4-4df1-b40f-3c1c3c717df4", "address": "fa:16:3e:23:0f:6b", "network": {"id": "5f1806b3-2bca-4ef0-8011-77ce4207d8e4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-78228628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "29c5c1c260474315a1a34b83a8054983", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e1fce64-30", "ovs_interfaceid": "3e1fce64-30c4-4df1-b40f-3c1c3c717df4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.379451] env[63297]: DEBUG nova.scheduler.client.report [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1445.412173] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697560, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072685} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.412428] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1445.413224] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d773ff53-6192-48db-ba44-8ca083572c00 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.436770] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 708d1907-1619-4aa4-b0b3-ae58f046a760/708d1907-1619-4aa4-b0b3-ae58f046a760.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1445.437427] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b014394-5359-4977-80db-4824bad21139 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.458691] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1445.458691] env[63297]: value = "task-1697561" [ 1445.458691] env[63297]: _type = "Task" [ 1445.458691] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.467379] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697561, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.534325] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Releasing lock "refresh_cache-89c9cd40-585e-4ae6-88b3-1a33a94c3b52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.534748] env[63297]: DEBUG nova.compute.manager [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Instance network_info: |[{"id": "3e1fce64-30c4-4df1-b40f-3c1c3c717df4", "address": "fa:16:3e:23:0f:6b", "network": {"id": "5f1806b3-2bca-4ef0-8011-77ce4207d8e4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-78228628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "29c5c1c260474315a1a34b83a8054983", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e1fce64-30", "ovs_interfaceid": "3e1fce64-30c4-4df1-b40f-3c1c3c717df4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1445.535801] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:0f:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0c0b05e-6d10-474c-9173-4c8f1dacac9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e1fce64-30c4-4df1-b40f-3c1c3c717df4', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1445.543853] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Creating folder: Project (29c5c1c260474315a1a34b83a8054983). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1445.545252] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-55c461f9-bf7a-4345-9e1d-5f2ed688efc2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.555648] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Created folder: Project (29c5c1c260474315a1a34b83a8054983) in parent group-v353718. 
[ 1445.555888] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Creating folder: Instances. Parent ref: group-v353908. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1445.556164] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aeb6f330-b9fc-4762-af9e-9a0220b05e30 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.567570] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Created folder: Instances in parent group-v353908. [ 1445.567570] env[63297]: DEBUG oslo.service.loopingcall [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1445.567570] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1445.567752] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5444c29-d823-47ec-9ce5-7d772b0d704a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.587467] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1445.587467] env[63297]: value = "task-1697564" [ 1445.587467] env[63297]: _type = "Task" [ 1445.587467] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.595380] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697564, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.795677] env[63297]: DEBUG nova.compute.manager [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1445.823655] env[63297]: DEBUG nova.virt.hardware [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1445.823877] env[63297]: DEBUG nova.virt.hardware [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1445.824039] env[63297]: DEBUG nova.virt.hardware [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1445.824216] env[63297]: DEBUG nova.virt.hardware [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1445.824355] env[63297]: DEBUG nova.virt.hardware [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1445.824492] env[63297]: DEBUG nova.virt.hardware [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1445.825491] env[63297]: DEBUG nova.virt.hardware [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1445.825491] env[63297]: DEBUG nova.virt.hardware [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1445.825491] env[63297]: DEBUG 
nova.virt.hardware [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1445.825491] env[63297]: DEBUG nova.virt.hardware [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1445.825491] env[63297]: DEBUG nova.virt.hardware [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1445.826340] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecf0ac2-960b-493c-a064-1ca1df379219 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.835581] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4e02fb-3af8-4555-a1fb-899997748726 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.886777] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.125s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.890249] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 39.738s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.890433] env[63297]: DEBUG nova.objects.instance [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63297) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1445.894248] env[63297]: DEBUG nova.compute.manager [req-27deaa94-5e8b-4430-80ac-daaaf0854f56 req-544d3b78-9fb2-4852-90dc-d040c625381c service nova] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Received event network-changed-3e1fce64-30c4-4df1-b40f-3c1c3c717df4 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1445.894792] env[63297]: DEBUG nova.compute.manager [req-27deaa94-5e8b-4430-80ac-daaaf0854f56 req-544d3b78-9fb2-4852-90dc-d040c625381c service nova] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Refreshing instance network info cache due to event network-changed-3e1fce64-30c4-4df1-b40f-3c1c3c717df4. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1445.894792] env[63297]: DEBUG oslo_concurrency.lockutils [req-27deaa94-5e8b-4430-80ac-daaaf0854f56 req-544d3b78-9fb2-4852-90dc-d040c625381c service nova] Acquiring lock "refresh_cache-89c9cd40-585e-4ae6-88b3-1a33a94c3b52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.894908] env[63297]: DEBUG oslo_concurrency.lockutils [req-27deaa94-5e8b-4430-80ac-daaaf0854f56 req-544d3b78-9fb2-4852-90dc-d040c625381c service nova] Acquired lock "refresh_cache-89c9cd40-585e-4ae6-88b3-1a33a94c3b52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.895061] env[63297]: DEBUG nova.network.neutron [req-27deaa94-5e8b-4430-80ac-daaaf0854f56 req-544d3b78-9fb2-4852-90dc-d040c625381c service nova] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Refreshing network info cache for port 3e1fce64-30c4-4df1-b40f-3c1c3c717df4 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1445.913715] env[63297]: INFO nova.scheduler.client.report [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Deleted allocations for instance b65e8c04-df55-491e-861c-8aa6def8c9be [ 1445.969392] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697561, 'name': ReconfigVM_Task, 'duration_secs': 0.329899} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.970639] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 708d1907-1619-4aa4-b0b3-ae58f046a760/708d1907-1619-4aa4-b0b3-ae58f046a760.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1445.970924] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ce25698-e3f8-4666-86b6-471b3da326fb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.978786] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1445.978786] env[63297]: value = "task-1697565" [ 1445.978786] env[63297]: _type = "Task" [ 1445.978786] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.989175] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697565, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.098767] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697564, 'name': CreateVM_Task, 'duration_secs': 0.403352} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.098948] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1446.099649] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1446.099849] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.100161] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1446.100402] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3487ff96-ce4b-4a3e-bb92-61c1ebaa86d8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.105827] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1446.105827] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52acd301-ff15-541f-cb2e-33bf90f7104d" [ 1446.105827] env[63297]: _type = "Task" [ 1446.105827] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.114074] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52acd301-ff15-541f-cb2e-33bf90f7104d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.420434] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f76a99fb-cbd6-4845-810e-51b727f03f8e tempest-FloatingIPsAssociationTestJSON-669159493 tempest-FloatingIPsAssociationTestJSON-669159493-project-member] Lock "b65e8c04-df55-491e-861c-8aa6def8c9be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.853s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.498285] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697565, 'name': Rename_Task, 'duration_secs': 0.388591} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.500815] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1446.505050] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7d9d9a2-a1fd-4bbd-85b4-b72be8b23492 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.514774] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1446.514774] env[63297]: value = "task-1697566" [ 1446.514774] env[63297]: _type = "Task" [ 1446.514774] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.528021] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697566, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.616301] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52acd301-ff15-541f-cb2e-33bf90f7104d, 'name': SearchDatastore_Task, 'duration_secs': 0.015646} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.616616] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.616847] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1446.617131] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1446.617234] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.617409] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1446.617676] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8745f3e4-2c5b-48a0-849a-1446559e2a3e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.633601] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1446.633790] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1446.634567] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b5ecd3c-5221-4bb4-8db5-91136797d8bb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.639961] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1446.639961] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520fbf0b-6ead-4a3f-0e71-c0e341808cdb" [ 1446.639961] env[63297]: _type = "Task" [ 1446.639961] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.647666] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520fbf0b-6ead-4a3f-0e71-c0e341808cdb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.667428] env[63297]: DEBUG nova.network.neutron [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Successfully updated port: f15b3b92-ed83-4487-b142-801fa9f72581 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1446.669381] env[63297]: DEBUG nova.network.neutron [req-27deaa94-5e8b-4430-80ac-daaaf0854f56 req-544d3b78-9fb2-4852-90dc-d040c625381c service nova] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Updated VIF entry in instance network info cache for port 3e1fce64-30c4-4df1-b40f-3c1c3c717df4. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1446.669715] env[63297]: DEBUG nova.network.neutron [req-27deaa94-5e8b-4430-80ac-daaaf0854f56 req-544d3b78-9fb2-4852-90dc-d040c625381c service nova] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Updating instance_info_cache with network_info: [{"id": "3e1fce64-30c4-4df1-b40f-3c1c3c717df4", "address": "fa:16:3e:23:0f:6b", "network": {"id": "5f1806b3-2bca-4ef0-8011-77ce4207d8e4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-78228628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "29c5c1c260474315a1a34b83a8054983", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e1fce64-30", "ovs_interfaceid": "3e1fce64-30c4-4df1-b40f-3c1c3c717df4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1446.906814] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ed8fd6a7-be5a-463f-bc77-55f3488d4e08 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.908139] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.725s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.028945] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697566, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.155880] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520fbf0b-6ead-4a3f-0e71-c0e341808cdb, 'name': SearchDatastore_Task, 'duration_secs': 0.028389} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.156846] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf7bbdc3-681a-4f55-a48f-0191786cef9c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.164151] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1447.164151] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52482828-49a6-d9e8-0480-864e803356e2" [ 1447.164151] env[63297]: _type = "Task" [ 1447.164151] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.177021] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "refresh_cache-c05a249e-ab88-41f0-81f5-b644b3da5d2d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.177171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquired lock "refresh_cache-c05a249e-ab88-41f0-81f5-b644b3da5d2d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.177276] env[63297]: DEBUG nova.network.neutron [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1447.178574] env[63297]: DEBUG oslo_concurrency.lockutils [req-27deaa94-5e8b-4430-80ac-daaaf0854f56 req-544d3b78-9fb2-4852-90dc-d040c625381c service nova] Releasing lock "refresh_cache-89c9cd40-585e-4ae6-88b3-1a33a94c3b52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.178975] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52482828-49a6-d9e8-0480-864e803356e2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.386635] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c69a59-b835-4a8a-9109-e2f1d8d47042 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.394934] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c763dfa-39a1-4852-9287-e1b0963f197c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.426695] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5049063c-0017-4bb3-91c2-f3e207a2c70b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.434150] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcef8fe-5b92-4c98-9be3-8a0a93b008e2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.448644] env[63297]: DEBUG nova.compute.provider_tree [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1447.526999] env[63297]: DEBUG oslo_vmware.api [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697566, 'name': PowerOnVM_Task, 'duration_secs': 0.812358} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.527282] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1447.527495] env[63297]: INFO nova.compute.manager [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Took 9.14 seconds to spawn the instance on the hypervisor. [ 1447.527673] env[63297]: DEBUG nova.compute.manager [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1447.528473] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde0cb9d-3651-4fd3-bc7c-481133f09392 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.680867] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52482828-49a6-d9e8-0480-864e803356e2, 'name': SearchDatastore_Task, 'duration_secs': 0.024272} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.683975] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.684544] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 89c9cd40-585e-4ae6-88b3-1a33a94c3b52/89c9cd40-585e-4ae6-88b3-1a33a94c3b52.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1447.684898] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-efa0ddf7-2468-400e-884a-b46332c1a948 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.693086] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1447.693086] env[63297]: value = "task-1697567" [ 1447.693086] env[63297]: _type = "Task" [ 1447.693086] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.706475] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697567, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.739327] env[63297]: DEBUG nova.network.neutron [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1447.916316] env[63297]: DEBUG nova.network.neutron [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Updating instance_info_cache with network_info: [{"id": "f15b3b92-ed83-4487-b142-801fa9f72581", "address": "fa:16:3e:f9:8d:b4", "network": {"id": "43d5b50a-9475-40f0-ac20-45a93157aa57", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-64163963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2deadaf96df7430aba8594c7f98facd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf15b3b92-ed", "ovs_interfaceid": "f15b3b92-ed83-4487-b142-801fa9f72581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.952861] env[63297]: DEBUG nova.scheduler.client.report [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1448.028777] env[63297]: DEBUG nova.compute.manager [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Received event network-vif-plugged-f15b3b92-ed83-4487-b142-801fa9f72581 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1448.029111] env[63297]: DEBUG oslo_concurrency.lockutils [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] Acquiring lock "c05a249e-ab88-41f0-81f5-b644b3da5d2d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.029430] env[63297]: DEBUG oslo_concurrency.lockutils [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] Lock "c05a249e-ab88-41f0-81f5-b644b3da5d2d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.029657] env[63297]: DEBUG oslo_concurrency.lockutils [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] Lock "c05a249e-ab88-41f0-81f5-b644b3da5d2d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.029849] env[63297]: DEBUG nova.compute.manager [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] No waiting events found dispatching network-vif-plugged-f15b3b92-ed83-4487-b142-801fa9f72581 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1448.030030] env[63297]: WARNING nova.compute.manager [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Received unexpected event network-vif-plugged-f15b3b92-ed83-4487-b142-801fa9f72581 for instance with vm_state building and task_state spawning. [ 1448.030232] env[63297]: DEBUG nova.compute.manager [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Received event network-changed-f15b3b92-ed83-4487-b142-801fa9f72581 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1448.030493] env[63297]: DEBUG nova.compute.manager [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Refreshing instance network info cache due to event network-changed-f15b3b92-ed83-4487-b142-801fa9f72581. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1448.031032] env[63297]: DEBUG oslo_concurrency.lockutils [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] Acquiring lock "refresh_cache-c05a249e-ab88-41f0-81f5-b644b3da5d2d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.047492] env[63297]: INFO nova.compute.manager [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Took 57.37 seconds to build instance. [ 1448.205896] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697567, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.419051] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Releasing lock "refresh_cache-c05a249e-ab88-41f0-81f5-b644b3da5d2d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.419426] env[63297]: DEBUG nova.compute.manager [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Instance network_info: |[{"id": "f15b3b92-ed83-4487-b142-801fa9f72581", "address": "fa:16:3e:f9:8d:b4", "network": {"id": "43d5b50a-9475-40f0-ac20-45a93157aa57", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-64163963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2deadaf96df7430aba8594c7f98facd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf15b3b92-ed", "ovs_interfaceid": "f15b3b92-ed83-4487-b142-801fa9f72581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1448.419744] env[63297]: DEBUG oslo_concurrency.lockutils [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] Acquired lock "refresh_cache-c05a249e-ab88-41f0-81f5-b644b3da5d2d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.420653] env[63297]: DEBUG nova.network.neutron [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Refreshing network info cache for port f15b3b92-ed83-4487-b142-801fa9f72581 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1448.421505] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:8d:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f15b3b92-ed83-4487-b142-801fa9f72581', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1448.432988] env[63297]: DEBUG oslo.service.loopingcall [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 
tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1448.435735] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1448.436487] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a178e7bf-2970-4945-9287-92bc4632282c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.457280] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1448.457280] env[63297]: value = "task-1697568" [ 1448.457280] env[63297]: _type = "Task" [ 1448.457280] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.458282] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.550s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.458472] env[63297]: INFO nova.compute.manager [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Successfully reverted task state from image_uploading on failure for instance. [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server [None req-0fa58e8d-049a-4765-b34f-3f3ee737937a tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Exception during message handling: oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.vm.Snapshot:snapshot-353882' has already been deleted or has not been completely created [ 1448.465853] env[63297]: Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-353882' has already been deleted or has not been completely created' [ 1448.465853] env[63297]: Faults: [ManagedObjectNotFound] [ 1448.465853] env[63297]: Details: {'obj': 'snapshot-353882'} [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server response = request(managed_object, **kwargs) [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__ [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server return client.invoke(args, kwargs) [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server result = self.send(soapenv, timeout=timeout) [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server return self.process_reply(reply.message, None, None) [ 
1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server raise WebFault(fault, replyroot) [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server suds.WebFault: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-353882' has already been deleted or has not been completely created' [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server return api_method(*args, **kwargs) [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server raise exceptions.VimFaultException(fault_list, fault_string, [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.VimFaultException: The object 'vim.vm.Snapshot:snapshot-353882' has already been deleted or has not been completely created [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-353882' has already been deleted or has not been completely created' [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server Faults: [ManagedObjectNotFound] [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server Details: {'obj': 'snapshot-353882'} [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server 
self.force_reraise() [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server raise self.value [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server raise self.value [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1448.465853] env[63297]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server raise self.value [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 233, in decorated_function [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server raise self.value [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 230, in decorated_function [ 
1448.467252] env[63297]: ERROR oslo_messaging.rpc.server return function(self, context, image_id, instance, [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4443, in snapshot_instance [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server self._snapshot_instance(context, image_id, instance, [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4476, in _snapshot_instance [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server self.driver.snapshot(context, instance, image_id, [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 571, in snapshot [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server self._vmops.snapshot(context, instance, image_id, update_task_state) [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1033, in snapshot [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server self._delete_vm_snapshot(instance, vm_ref, snapshot_ref) [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/decorator.py", line 232, in fun [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server return caller(func, *(extras + args), **kw) [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 124, in retry_if_task_in_progress [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server f(*args, **kwargs) [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 933, in _delete_vm_snapshot [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server delete_snapshot_task = self._session._call_method( [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 127, in _call_method [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception() as ctxt: [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server raise self.value [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 125, in _call_method [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server return self.invoke_api(module, method, *args, **kwargs) [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server return _invoke_api(module, method, *args, **kwargs) [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1448.467252] env[63297]: ERROR 
oslo_messaging.rpc.server result = hub.switch() [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server idle = self.f(*self.args, **self.kw) [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api [ 1448.467252] env[63297]: ERROR oslo_messaging.rpc.server raise clazz(str(excep), [ 1448.468779] env[63297]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.vm.Snapshot:snapshot-353882' has already been deleted or has not been completely created [ 1448.468779] env[63297]: ERROR oslo_messaging.rpc.server Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-353882' has already been deleted or has not been completely created' [ 1448.468779] env[63297]: ERROR oslo_messaging.rpc.server Faults: [ManagedObjectNotFound] [ 1448.468779] env[63297]: ERROR oslo_messaging.rpc.server Details: {'obj': 'snapshot-353882'} [ 1448.468779] env[63297]: ERROR oslo_messaging.rpc.server [ 1448.468779] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.269s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.468779] env[63297]: DEBUG nova.objects.instance [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lazy-loading 'resources' on Instance uuid 6ce88b93-aa42-4f34-81fa-6c09c23ace81 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1448.473719] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697568, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.550348] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c8b4d6b-5114-43cf-8bf4-8c108d0b4ccd tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "708d1907-1619-4aa4-b0b3-ae58f046a760" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.991s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.664444] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "fba9040d-f904-44a1-8785-14d4696ea939" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.664444] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "fba9040d-f904-44a1-8785-14d4696ea939" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.664602] env[63297]: INFO nova.compute.manager [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Shelving [ 1448.706321] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697567, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52998} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.708958] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 89c9cd40-585e-4ae6-88b3-1a33a94c3b52/89c9cd40-585e-4ae6-88b3-1a33a94c3b52.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1448.708958] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1448.709892] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40f3c8d7-57d7-4e1b-a920-5217f6310a69 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.718040] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1448.718040] env[63297]: value = "task-1697569" [ 1448.718040] env[63297]: _type = "Task" [ 1448.718040] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.728639] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697569, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.775328] env[63297]: DEBUG nova.network.neutron [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Updated VIF entry in instance network info cache for port f15b3b92-ed83-4487-b142-801fa9f72581. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1448.775723] env[63297]: DEBUG nova.network.neutron [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Updating instance_info_cache with network_info: [{"id": "f15b3b92-ed83-4487-b142-801fa9f72581", "address": "fa:16:3e:f9:8d:b4", "network": {"id": "43d5b50a-9475-40f0-ac20-45a93157aa57", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-64163963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2deadaf96df7430aba8594c7f98facd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf15b3b92-ed", "ovs_interfaceid": "f15b3b92-ed83-4487-b142-801fa9f72581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.968101] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697568, 'name': CreateVM_Task, 'duration_secs': 0.360806} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.968309] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1448.969024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.969230] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.969697] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1448.969986] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b326ed94-8830-4b44-95cf-4d79ce45b7b4 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.976759] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1448.976759] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bcf369-5117-6ef1-cbb1-95696b999db5" [ 1448.976759] env[63297]: _type = "Task" [ 1448.976759] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.985037] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bcf369-5117-6ef1-cbb1-95696b999db5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.174496] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1449.174930] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b7d17dc-d16c-4ac7-b611-c91a93f826a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.182037] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1449.182037] env[63297]: value = "task-1697570" [ 1449.182037] env[63297]: _type = "Task" [ 1449.182037] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.192582] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697570, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.230105] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697569, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070218} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.230297] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1449.233396] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71df4af5-288a-4feb-9207-5b843e79469b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.259445] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 89c9cd40-585e-4ae6-88b3-1a33a94c3b52/89c9cd40-585e-4ae6-88b3-1a33a94c3b52.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1449.261568] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16a64d5f-4753-4209-8fdd-a8141ce9473d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.278107] env[63297]: DEBUG oslo_concurrency.lockutils [req-b68b6aa6-cf20-47d3-9325-2506664b49ee req-fd5e6e18-9180-4f6d-9a1f-e8dd8ee5b8ec service nova] Releasing lock "refresh_cache-c05a249e-ab88-41f0-81f5-b644b3da5d2d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.284862] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1449.284862] env[63297]: value = "task-1697571" [ 1449.284862] env[63297]: _type = "Task" [ 1449.284862] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.295620] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697571, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.445992] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca61636c-aaa0-41f4-95e8-761882628254 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.454091] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3af9a87-b152-424e-9ae6-2545d224d643 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.487432] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2638f1-d1e2-4c69-bb23-49aadd959a14 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.495311] env[63297]: DEBUG oslo_concurrency.lockutils [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "708d1907-1619-4aa4-b0b3-ae58f046a760" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.495553] env[63297]: DEBUG oslo_concurrency.lockutils [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "708d1907-1619-4aa4-b0b3-ae58f046a760" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.495786] env[63297]: DEBUG oslo_concurrency.lockutils [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "708d1907-1619-4aa4-b0b3-ae58f046a760-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.496013] env[63297]: DEBUG oslo_concurrency.lockutils [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "708d1907-1619-4aa4-b0b3-ae58f046a760-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.496193] env[63297]: DEBUG oslo_concurrency.lockutils [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "708d1907-1619-4aa4-b0b3-ae58f046a760-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.497816] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bcf369-5117-6ef1-cbb1-95696b999db5, 'name': SearchDatastore_Task, 'duration_secs': 0.030601} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.500154] env[63297]: INFO nova.compute.manager [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Terminating instance [ 1449.501548] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.501770] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1449.502091] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.502172] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.502378] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1449.502932] env[63297]: DEBUG nova.compute.manager [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1449.503129] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1449.503353] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10a8f165-8732-4935-b9b2-42dbcb211c4c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.506209] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fc9f64-893e-473e-8a29-1896d8f38f0e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.510482] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1286110-2ccb-422f-9587-65f793c9ded7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.523655] env[63297]: DEBUG nova.compute.provider_tree [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1449.527015] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1449.527457] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-602dcdcd-0ac5-4065-b1a8-f4b7fd8e49b3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.529823] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1449.529997] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1449.531095] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7cce56d-4c02-4684-857f-11a13cf1f6d2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.536052] env[63297]: DEBUG oslo_vmware.api [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1449.536052] env[63297]: value = "task-1697572" [ 1449.536052] env[63297]: _type = "Task" [ 1449.536052] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.538205] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1449.538205] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5233fd60-e453-8912-6b35-274610aaa808" [ 1449.538205] env[63297]: _type = "Task" [ 1449.538205] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.552522] env[63297]: DEBUG oslo_vmware.api [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697572, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.552798] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5233fd60-e453-8912-6b35-274610aaa808, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.693983] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697570, 'name': PowerOffVM_Task, 'duration_secs': 0.247835} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.694501] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1449.695444] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357720b9-28b0-4113-a385-de0e5fb3b90d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.718930] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6d243f-48a8-4867-a6f8-1e52bf0c4e5b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.795210] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697571, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.028750] env[63297]: DEBUG nova.scheduler.client.report [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1450.048808] env[63297]: DEBUG oslo_vmware.api [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697572, 'name': PowerOffVM_Task, 'duration_secs': 0.245469} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.049477] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1450.049723] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1450.050007] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-268a1669-ba83-44f8-ac50-89202bb190e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.054854] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5233fd60-e453-8912-6b35-274610aaa808, 'name': SearchDatastore_Task, 'duration_secs': 0.017946} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.056064] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66afbb19-6f82-4c2f-ac1a-225fb4eea91b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.060812] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1450.060812] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52aacc15-9196-ee96-74ad-4ad7f0948159" [ 1450.060812] env[63297]: _type = "Task" [ 1450.060812] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.069111] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52aacc15-9196-ee96-74ad-4ad7f0948159, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.148166] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1450.148166] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1450.148166] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleting the datastore file [datastore1] 708d1907-1619-4aa4-b0b3-ae58f046a760 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1450.148411] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebbbf776-fa04-4e0b-b74d-8c1593f54a58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.156535] env[63297]: DEBUG oslo_vmware.api [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1450.156535] env[63297]: value = "task-1697574" [ 1450.156535] env[63297]: _type = "Task" [ 1450.156535] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.168299] env[63297]: DEBUG oslo_vmware.api [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697574, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.233099] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1450.233450] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c8e35a50-7668-4722-a036-7418c915926b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.242472] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1450.242472] env[63297]: value = "task-1697575" [ 1450.242472] env[63297]: _type = "Task" [ 1450.242472] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.251224] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697575, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.296211] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697571, 'name': ReconfigVM_Task, 'duration_secs': 0.58027} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.299542] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 89c9cd40-585e-4ae6-88b3-1a33a94c3b52/89c9cd40-585e-4ae6-88b3-1a33a94c3b52.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1450.300417] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19083faa-221f-4f48-85c1-daaa8553a5ef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.316029] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1450.316029] env[63297]: value = "task-1697576" [ 1450.316029] env[63297]: _type = "Task" [ 1450.316029] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.324761] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697576, 'name': Rename_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.535162] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.068s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.537584] env[63297]: DEBUG oslo_concurrency.lockutils [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.339s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.537727] env[63297]: DEBUG nova.objects.instance [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Lazy-loading 'resources' on Instance uuid 35c68986-51b5-43ba-a076-aca3c86d68bc {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1450.576450] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52aacc15-9196-ee96-74ad-4ad7f0948159, 'name': SearchDatastore_Task, 'duration_secs': 0.029574} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.578931] env[63297]: INFO nova.scheduler.client.report [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleted allocations for instance 6ce88b93-aa42-4f34-81fa-6c09c23ace81 [ 1450.580602] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.580919] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c05a249e-ab88-41f0-81f5-b644b3da5d2d/c05a249e-ab88-41f0-81f5-b644b3da5d2d.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1450.582308] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b87d085e-2389-4446-a89e-888a2ed87b00 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.592419] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1450.592419] 
env[63297]: value = "task-1697577" [ 1450.592419] env[63297]: _type = "Task" [ 1450.592419] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.603396] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697577, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.665926] env[63297]: DEBUG oslo_vmware.api [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215367} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.666388] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1450.666480] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1450.666636] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1450.666838] env[63297]: INFO nova.compute.manager [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1450.667164] env[63297]: DEBUG oslo.service.loopingcall [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.667275] env[63297]: DEBUG nova.compute.manager [-] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1450.667369] env[63297]: DEBUG nova.network.neutron [-] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1450.755601] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697575, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.827965] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697576, 'name': Rename_Task, 'duration_secs': 0.164981} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.829974] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1450.832109] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e03927f-9870-4ce4-9b0a-becae92adf12 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.838289] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1450.838289] env[63297]: value = "task-1697578" [ 1450.838289] env[63297]: _type = "Task" [ 1450.838289] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.848679] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697578, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.063421] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "5124f7fb-1293-4964-98c4-426ecfce7d10" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.063739] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.093682] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b3a2afe-9a66-4dbd-9aad-ea4684e3d6ac tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "6ce88b93-aa42-4f34-81fa-6c09c23ace81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.137s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.112063] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697577, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476849} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.112465] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c05a249e-ab88-41f0-81f5-b644b3da5d2d/c05a249e-ab88-41f0-81f5-b644b3da5d2d.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1451.112745] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1451.113057] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1858c8a6-be86-46fc-b208-74cbca77c391 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.124059] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1451.124059] env[63297]: value = "task-1697579" [ 1451.124059] env[63297]: _type = "Task" [ 1451.124059] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.135812] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697579, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.186162] env[63297]: DEBUG nova.compute.manager [req-5c8349e4-474b-462e-b7e7-00c9f7e59e65 req-457fe827-7fbe-4a03-b00f-a90f25827449 service nova] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Received event network-vif-deleted-8b35ba63-0efc-493d-a8be-caa8cd5b2e21 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1451.186365] env[63297]: INFO nova.compute.manager [req-5c8349e4-474b-462e-b7e7-00c9f7e59e65 req-457fe827-7fbe-4a03-b00f-a90f25827449 service nova] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Neutron deleted interface 8b35ba63-0efc-493d-a8be-caa8cd5b2e21; detaching it from the instance and deleting it from the info cache [ 1451.186512] env[63297]: DEBUG nova.network.neutron [req-5c8349e4-474b-462e-b7e7-00c9f7e59e65 req-457fe827-7fbe-4a03-b00f-a90f25827449 service nova] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.256077] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697575, 'name': CreateSnapshot_Task, 'duration_secs': 0.774601} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.256077] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1451.256769] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46bd68d-d102-4274-a7d9-79c7abe62ffb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.348367] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697578, 'name': PowerOnVM_Task} progress is 92%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.546677] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05e5da1-a123-46f5-a095-5c6a2041d03f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.554899] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf7de6d-f99a-4d18-b777-1d18e02dda41 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.587380] env[63297]: INFO nova.compute.manager [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Detaching volume 1f16c2ed-7350-4649-be78-689cfc165090 [ 1451.591517] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e45aa8-b231-4a7b-ac48-c97b24da1c22 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.601050] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48dbd1cc-7c5a-483b-9544-3859f83f612a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.616598] env[63297]: DEBUG nova.compute.provider_tree [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1451.636707] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697579, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071854} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.637054] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1451.637811] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d881bf44-fc92-4743-a085-3f09dd50178d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.643033] env[63297]: INFO nova.virt.block_device [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Attempting to driver detach volume 1f16c2ed-7350-4649-be78-689cfc165090 from mountpoint /dev/sdb [ 1451.643129] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Volume detach. 
Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1451.643413] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353888', 'volume_id': '1f16c2ed-7350-4649-be78-689cfc165090', 'name': 'volume-1f16c2ed-7350-4649-be78-689cfc165090', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5124f7fb-1293-4964-98c4-426ecfce7d10', 'attached_at': '', 'detached_at': '', 'volume_id': '1f16c2ed-7350-4649-be78-689cfc165090', 'serial': '1f16c2ed-7350-4649-be78-689cfc165090'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1451.644155] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9f2d93-58b2-4ed7-a303-786323a92d79 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.662082] env[63297]: DEBUG nova.network.neutron [-] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.677806] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] c05a249e-ab88-41f0-81f5-b644b3da5d2d/c05a249e-ab88-41f0-81f5-b644b3da5d2d.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1451.677806] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e018af1-ad75-4e6c-b8ba-6295c7732a68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.707411] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-451bc780-d115-44e3-ad61-a232ca874ce4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.711106] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb88376f-a500-464a-a170-c56fe4628440 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.716345] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1451.716345] env[63297]: value = "task-1697580" [ 1451.716345] env[63297]: _type = "Task" [ 1451.716345] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.726196] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f4fc0f-5028-419e-83b2-591ff37bb895 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.736910] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8c9ac5-c5b9-41b1-9369-9d57772bcd3a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.742755] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697580, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.777587] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3a2baa-c82b-4c69-8bad-3917cd5fc3fb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.789755] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1451.790378] env[63297]: DEBUG nova.compute.manager [req-5c8349e4-474b-462e-b7e7-00c9f7e59e65 req-457fe827-7fbe-4a03-b00f-a90f25827449 service nova] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Detach interface failed, port_id=8b35ba63-0efc-493d-a8be-caa8cd5b2e21, reason: Instance 708d1907-1619-4aa4-b0b3-ae58f046a760 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1451.790985] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-51d1157f-ef6b-45e2-92b3-83cb5ad22b85 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.808552] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] The volume has not been displaced from its original location: [datastore1] volume-1f16c2ed-7350-4649-be78-689cfc165090/volume-1f16c2ed-7350-4649-be78-689cfc165090.vmdk. No consolidation needed. 
{{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1451.814120] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Reconfiguring VM instance instance-0000002d to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1451.815785] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9f86fea-a298-4c28-acec-0d7df1400bcd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.829873] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1451.829873] env[63297]: value = "task-1697581" [ 1451.829873] env[63297]: _type = "Task" [ 1451.829873] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.836760] env[63297]: DEBUG oslo_vmware.api [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1451.836760] env[63297]: value = "task-1697582" [ 1451.836760] env[63297]: _type = "Task" [ 1451.836760] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.841542] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697581, 'name': CloneVM_Task} progress is 12%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.848290] env[63297]: DEBUG oslo_vmware.api [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697582, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.853366] env[63297]: DEBUG oslo_vmware.api [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697578, 'name': PowerOnVM_Task, 'duration_secs': 0.637637} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.853648] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1451.853854] env[63297]: INFO nova.compute.manager [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Took 8.70 seconds to spawn the instance on the hypervisor. 
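The entries above follow one recurring pattern: each vCenter operation (ReconfigVM_Task, CloneVM_Task, PowerOnVM_Task) is started as an asynchronous vSphere task and then polled until it reports success, which is what the repeated "Waiting for the task", "progress is N%" and "completed successfully" lines record. The sketch below is only an illustration of that polling loop, not the oslo.vmware implementation; `get_task_info` is a hypothetical stand-in for the PropertyCollector round trip the log shows.

```python
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str          # "running", "success" or "error"
    progress: int = 0
    result: object = None
    error: str = ""


# Hypothetical stand-in for the RetrievePropertiesEx call against the
# Task managed object; it simulates a task that advances on each poll.
_FAKE_PROGRESS = {}


def get_task_info(task_ref):
    pct = _FAKE_PROGRESS.get(task_ref, 0) + 33
    _FAKE_PROGRESS[task_ref] = pct
    if pct >= 100:
        return TaskInfo(state="success", progress=100, result="ok")
    return TaskInfo(state="running", progress=pct)


def wait_for_task(task_ref, poll_interval=0.1):
    """Poll a vSphere-style task until it finishes, mirroring the
    'progress is N%' / 'completed successfully' entries above."""
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            print(f"Task {task_ref} completed successfully")
            return info.result
        if info.state == "error":
            raise RuntimeError(info.error)
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task("task-1697580")
```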
[ 1451.854049] env[63297]: DEBUG nova.compute.manager [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1451.854870] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae3bb03-a500-4f94-98bb-390d512d1725 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.120481] env[63297]: DEBUG nova.scheduler.client.report [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1452.173525] env[63297]: INFO nova.compute.manager [-] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Took 1.51 seconds to deallocate network for instance. [ 1452.228268] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697580, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.343240] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697581, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.348579] env[63297]: DEBUG oslo_vmware.api [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697582, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.374958] env[63297]: INFO nova.compute.manager [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Took 56.25 seconds to build instance. 
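The "Lock \"compute_resources\" acquired ... waited" and "released ... held" entries that follow come from oslo.concurrency named locks that the resource tracker holds around instance claims and usage updates; the waited/held durations bracket the critical section. A minimal sketch of that usage is below, assuming the standard oslo_concurrency.lockutils API; the class and method names are illustrative placeholders, not Nova's resource tracker.

```python
from oslo_concurrency import lockutils


# Illustrative only: the real code lives in nova.compute.resource_tracker.
class ToyResourceTracker:

    @lockutils.synchronized('compute_resources')
    def instance_claim(self, instance_uuid, vcpus, memory_mb):
        # Runs with the named lock held; the log's "waited X.XXXs" /
        # "held X.XXXs" lines bracket this region.
        print(f"claiming {vcpus} vCPU / {memory_mb} MB for {instance_uuid}")

    def update_usage(self, instance_uuid):
        # Equivalent context-manager form of the same named lock.
        with lockutils.lock('compute_resources'):
            print(f"updating usage for {instance_uuid}")


if __name__ == "__main__":
    rt = ToyResourceTracker()
    rt.instance_claim('96265295-6b0c-4803-bb89-6166c9d3fc7f', 1, 512)
    rt.update_usage('96265295-6b0c-4803-bb89-6166c9d3fc7f')
```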
[ 1452.627861] env[63297]: DEBUG oslo_concurrency.lockutils [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.089s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.629121] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.383s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.632216] env[63297]: INFO nova.compute.claims [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1452.658652] env[63297]: INFO nova.scheduler.client.report [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Deleted allocations for instance 35c68986-51b5-43ba-a076-aca3c86d68bc [ 1452.685625] env[63297]: DEBUG oslo_concurrency.lockutils [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.735099] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697580, 'name': ReconfigVM_Task, 'duration_secs': 0.774345} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.735410] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Reconfigured VM instance instance-00000042 to attach disk [datastore1] c05a249e-ab88-41f0-81f5-b644b3da5d2d/c05a249e-ab88-41f0-81f5-b644b3da5d2d.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1452.736347] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7eeb8016-9434-439a-a4aa-84bda1f30089 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.746206] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1452.746206] env[63297]: value = "task-1697583" [ 1452.746206] env[63297]: _type = "Task" [ 1452.746206] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.759899] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697583, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.846756] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697581, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.849749] env[63297]: DEBUG oslo_vmware.api [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697582, 'name': ReconfigVM_Task, 'duration_secs': 0.54069} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.850033] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Reconfigured VM instance instance-0000002d to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1452.854822] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73b02bdd-8bc4-4c08-b138-2b0eddfe7b66 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.871436] env[63297]: DEBUG oslo_vmware.api [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1452.871436] env[63297]: value = "task-1697584" [ 1452.871436] env[63297]: _type = "Task" [ 1452.871436] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.877979] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46abf2c3-b01a-4bca-a0ca-e17070fc8781 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "89c9cd40-585e-4ae6-88b3-1a33a94c3b52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.988s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.885024] env[63297]: DEBUG oslo_vmware.api [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697584, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.166026] env[63297]: DEBUG oslo_concurrency.lockutils [None req-096249f5-1c8a-46c1-9340-ed0e3959fadc tempest-VolumesAssistedSnapshotsTest-192125853 tempest-VolumesAssistedSnapshotsTest-192125853-project-member] Lock "35c68986-51b5-43ba-a076-aca3c86d68bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.576s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1453.256464] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697583, 'name': Rename_Task, 'duration_secs': 0.172912} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.256739] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1453.256990] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb964a6b-a5e7-4866-9726-1c5dc689d4af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.264925] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1453.264925] env[63297]: value = "task-1697585" [ 1453.264925] env[63297]: _type = "Task" [ 1453.264925] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.274923] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697585, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.344755] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697581, 'name': CloneVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.380403] env[63297]: DEBUG oslo_vmware.api [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697584, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.780011] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697585, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.802993] env[63297]: INFO nova.compute.manager [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Rescuing [ 1453.805083] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "refresh_cache-89c9cd40-585e-4ae6-88b3-1a33a94c3b52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.805083] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired lock "refresh_cache-89c9cd40-585e-4ae6-88b3-1a33a94c3b52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.805083] env[63297]: DEBUG nova.network.neutron [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1453.850127] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697581, 'name': CloneVM_Task, 'duration_secs': 1.553162} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.854028] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Created linked-clone VM from snapshot [ 1453.854028] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ca4c99-d4b2-4e73-9773-320e462e81ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.858913] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Uploading image dd69ed84-813a-498d-a635-ba1ad9182458 {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1453.883426] env[63297]: DEBUG oslo_vmware.api [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697584, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.892844] env[63297]: DEBUG oslo_vmware.rw_handles [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1453.892844] env[63297]: value = "vm-353913" [ 1453.892844] env[63297]: _type = "VirtualMachine" [ 1453.892844] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1453.893142] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-94dc6b4b-8d97-4abb-ad3b-897e7ac39c0d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.900408] env[63297]: DEBUG oslo_vmware.rw_handles [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lease: (returnval){ [ 1453.900408] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52371fc3-e1b3-1c3e-87a9-4f3a4459b7cb" [ 1453.900408] env[63297]: _type = "HttpNfcLease" [ 1453.900408] env[63297]: } obtained for exporting VM: (result){ [ 1453.900408] env[63297]: value = "vm-353913" [ 1453.900408] env[63297]: _type = "VirtualMachine" [ 1453.900408] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1453.900647] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the lease: (returnval){ [ 1453.900647] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52371fc3-e1b3-1c3e-87a9-4f3a4459b7cb" [ 1453.900647] env[63297]: _type = "HttpNfcLease" [ 1453.900647] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1453.909428] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1453.909428] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52371fc3-e1b3-1c3e-87a9-4f3a4459b7cb" [ 1453.909428] env[63297]: _type = "HttpNfcLease" [ 1453.909428] env[63297]: } is initializing. 
{{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1454.097020] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfda0ab9-70ad-4185-a83d-aed387ec876f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.105126] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4238f173-98c9-430e-8d13-ff498a3d26b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.136036] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ebbaa6-23f1-40db-b0ed-ee6b32762206 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.144230] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27269774-b06f-448a-b2cd-8a972f47f4eb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.157986] env[63297]: DEBUG nova.compute.provider_tree [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1454.276752] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697585, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.381323] env[63297]: DEBUG oslo_vmware.api [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697584, 'name': ReconfigVM_Task, 'duration_secs': 1.175237} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.381617] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353888', 'volume_id': '1f16c2ed-7350-4649-be78-689cfc165090', 'name': 'volume-1f16c2ed-7350-4649-be78-689cfc165090', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5124f7fb-1293-4964-98c4-426ecfce7d10', 'attached_at': '', 'detached_at': '', 'volume_id': '1f16c2ed-7350-4649-be78-689cfc165090', 'serial': '1f16c2ed-7350-4649-be78-689cfc165090'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1454.407978] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1454.407978] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52371fc3-e1b3-1c3e-87a9-4f3a4459b7cb" [ 1454.407978] env[63297]: _type = "HttpNfcLease" [ 1454.407978] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1454.408519] env[63297]: DEBUG oslo_vmware.rw_handles [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1454.408519] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52371fc3-e1b3-1c3e-87a9-4f3a4459b7cb" [ 1454.408519] env[63297]: _type = "HttpNfcLease" [ 1454.408519] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1454.409412] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f26a29-4044-4afd-8de6-15df696dc5a1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.417446] env[63297]: DEBUG oslo_vmware.rw_handles [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52958274-745d-40c2-1d32-f53fd55899b1/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1454.417625] env[63297]: DEBUG oslo_vmware.rw_handles [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52958274-745d-40c2-1d32-f53fd55899b1/disk-0.vmdk for reading. 
{{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1454.512245] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7c96d772-8947-46fc-8bcc-c53c2956f08c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.637153] env[63297]: DEBUG nova.network.neutron [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Updating instance_info_cache with network_info: [{"id": "3e1fce64-30c4-4df1-b40f-3c1c3c717df4", "address": "fa:16:3e:23:0f:6b", "network": {"id": "5f1806b3-2bca-4ef0-8011-77ce4207d8e4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-78228628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "29c5c1c260474315a1a34b83a8054983", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e1fce64-30", "ovs_interfaceid": "3e1fce64-30c4-4df1-b40f-3c1c3c717df4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1454.685865] env[63297]: ERROR nova.scheduler.client.report [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [req-70749f3d-7cb1-4557-92b9-3d57ae3774a4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-70749f3d-7cb1-4557-92b9-3d57ae3774a4"}]} [ 1454.721382] env[63297]: DEBUG nova.scheduler.client.report [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1454.749089] env[63297]: DEBUG nova.scheduler.client.report [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1454.749089] env[63297]: DEBUG nova.compute.provider_tree [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1454.760927] env[63297]: DEBUG nova.scheduler.client.report [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1454.776111] env[63297]: DEBUG oslo_vmware.api [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697585, 'name': PowerOnVM_Task, 'duration_secs': 1.407666} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.776375] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1454.776574] env[63297]: INFO nova.compute.manager [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Took 8.98 seconds to spawn the instance on the hypervisor. [ 1454.776742] env[63297]: DEBUG nova.compute.manager [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1454.777568] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af187adb-0703-47e9-a8cc-cb1518c92139 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.781194] env[63297]: DEBUG nova.scheduler.client.report [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1454.940369] env[63297]: DEBUG nova.objects.instance [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lazy-loading 'flavor' on Instance uuid 5124f7fb-1293-4964-98c4-426ecfce7d10 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1455.140101] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Releasing lock "refresh_cache-89c9cd40-585e-4ae6-88b3-1a33a94c3b52" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1455.290693] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196cff27-37f7-49b2-8186-bff458fef0e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.308407] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6fca46-ce44-4ef5-b9e3-e424058ef0c8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.314356] env[63297]: INFO nova.compute.manager [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Took 53.78 seconds to build instance. 
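The earlier 409 ("resource provider generation conflict", code placement.concurrent_update) followed by the inventory refresh and the generation bump from 89 to 90 illustrates Placement's optimistic-concurrency scheme: every inventory PUT must carry the provider generation the client last saw, and a stale generation forces a re-read and retry. Below is a minimal sketch of that retry loop against the Placement HTTP API, assuming a pre-authenticated `requests.Session` and ignoring microversion negotiation; the endpoint URL is hypothetical.

```python
import requests

PLACEMENT = "http://placement.example/placement"   # hypothetical endpoint
RP_UUID = "88960333-a089-4255-ad72-5c02d57b2b35"


def set_inventory(sess: requests.Session, inventories: dict, max_retries: int = 3):
    """PUT the full inventory for a resource provider, retrying on the
    generation conflict (HTTP 409, code placement.concurrent_update)."""
    url = f"{PLACEMENT}/resource_providers/{RP_UUID}/inventories"
    for _ in range(max_retries):
        # Re-read the provider's current generation before each attempt.
        current = sess.get(url).json()
        body = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = sess.put(url, json=body)
        if resp.status_code == 200:
            return resp.json()
        if resp.status_code == 409 and "placement.concurrent_update" in resp.text:
            continue        # another writer bumped the generation; refresh and retry
        resp.raise_for_status()
    raise RuntimeError("gave up after repeated generation conflicts")
```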
[ 1455.349281] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71464d6-4994-4b56-a94e-777f6f8a2bad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.358699] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26dee2a7-a860-43fe-8f8f-c57cbfe8fc6a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.375493] env[63297]: DEBUG nova.compute.provider_tree [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1455.680041] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1455.680420] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da1cb27c-fa51-493c-a4d4-67106a5671fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.689491] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1455.689491] env[63297]: value = "task-1697587" [ 1455.689491] env[63297]: _type = "Task" [ 1455.689491] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.698784] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697587, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.818369] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1b26b688-9640-45d0-97b3-f657460b7591 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "c05a249e-ab88-41f0-81f5-b644b3da5d2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.351s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.917996] env[63297]: DEBUG nova.scheduler.client.report [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 89 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1455.921018] env[63297]: DEBUG nova.compute.provider_tree [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 89 to 90 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1455.921018] env[63297]: DEBUG nova.compute.provider_tree [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1455.957862] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46baf3f9-6651-4ab8-b21c-443c959ddbb9 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.894s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1456.203842] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697587, 'name': PowerOffVM_Task, 'duration_secs': 0.277616} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.204334] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1456.205266] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff3ded9-4201-42a2-8be4-7a99317bfea4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.227799] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b799e2ff-fd04-4a29-a8ac-dc2776c9c7be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.266828] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1456.267491] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-775773d9-085b-4516-858c-0012656d94c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.274942] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1456.274942] env[63297]: value = "task-1697588" [ 1456.274942] env[63297]: _type = "Task" [ 1456.274942] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.285786] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1456.286010] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1456.286446] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1456.286662] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1456.286875] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1456.287143] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fce63fa8-0088-4ee7-b445-10f9e18a107c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.299086] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1456.299086] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1456.299086] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cb5bacb-6467-4112-8ed8-887124f05675 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.304305] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1456.304305] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52eadd37-e25b-22e3-0623-bdd924c70c4e" [ 1456.304305] env[63297]: _type = "Task" [ 1456.304305] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.313831] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52eadd37-e25b-22e3-0623-bdd924c70c4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.429079] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.800s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1456.429873] env[63297]: DEBUG nova.compute.manager [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1456.437101] env[63297]: DEBUG oslo_concurrency.lockutils [None req-53d7579a-5599-4d91-887a-420a5089ab0c tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "5124f7fb-1293-4964-98c4-426ecfce7d10" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.437634] env[63297]: DEBUG oslo_concurrency.lockutils [None req-53d7579a-5599-4d91-887a-420a5089ab0c tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1456.438454] env[63297]: DEBUG nova.compute.manager [None req-53d7579a-5599-4d91-887a-420a5089ab0c tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1456.441647] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 45.535s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1456.441843] env[63297]: DEBUG nova.objects.instance [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63297) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1456.445828] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9d758d-5951-485f-ac98-8df5bb84cd0e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.454134] env[63297]: DEBUG nova.compute.manager [None req-53d7579a-5599-4d91-887a-420a5089ab0c tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63297) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1456.454739] env[63297]: DEBUG nova.objects.instance [None req-53d7579a-5599-4d91-887a-420a5089ab0c tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lazy-loading 'flavor' on Instance uuid 5124f7fb-1293-4964-98c4-426ecfce7d10 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1456.820668] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52eadd37-e25b-22e3-0623-bdd924c70c4e, 'name': SearchDatastore_Task, 
'duration_secs': 0.019148} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.825020] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6f98af3-be5d-4e0b-ad80-22184cd0f060 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.826894] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1456.826894] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526db0eb-7856-2e86-86ca-4ea342765c24" [ 1456.826894] env[63297]: _type = "Task" [ 1456.826894] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.835579] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526db0eb-7856-2e86-86ca-4ea342765c24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.938518] env[63297]: DEBUG nova.compute.utils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1456.942503] env[63297]: DEBUG nova.compute.manager [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1456.942695] env[63297]: DEBUG nova.network.neutron [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1456.963252] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-53d7579a-5599-4d91-887a-420a5089ab0c tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1456.964802] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17ffa777-6292-48e9-bd50-056188abb3ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.972181] env[63297]: DEBUG oslo_vmware.api [None req-53d7579a-5599-4d91-887a-420a5089ab0c tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1456.972181] env[63297]: value = "task-1697589" [ 1456.972181] env[63297]: _type = "Task" [ 1456.972181] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.981400] env[63297]: DEBUG oslo_vmware.api [None req-53d7579a-5599-4d91-887a-420a5089ab0c tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697589, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.022171] env[63297]: DEBUG nova.policy [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ce4e0757c584ebdb556c79d3c0bd990', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2786fb3158214107a458dc08735ebeb1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1457.338011] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526db0eb-7856-2e86-86ca-4ea342765c24, 'name': SearchDatastore_Task, 'duration_secs': 0.012951} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.338314] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1457.338575] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 89c9cd40-585e-4ae6-88b3-1a33a94c3b52/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk. {{(pid=63297) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1457.338845] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d0e8400-d493-4aa1-b431-6fdac55280db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.346493] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1457.346493] env[63297]: value = "task-1697590" [ 1457.346493] env[63297]: _type = "Task" [ 1457.346493] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.353781] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.448181] env[63297]: DEBUG nova.compute.manager [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1457.454877] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9731d5c2-ddde-4c47-8d5b-d97a59561950 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.455664] env[63297]: DEBUG oslo_concurrency.lockutils [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.687s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.460468] env[63297]: DEBUG nova.network.neutron [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Successfully created port: cc29c84c-0884-4feb-9a78-7098d11b28ab {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1457.487520] env[63297]: DEBUG oslo_vmware.api [None req-53d7579a-5599-4d91-887a-420a5089ab0c tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697589, 'name': PowerOffVM_Task, 'duration_secs': 0.197275} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.489050] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-53d7579a-5599-4d91-887a-420a5089ab0c tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1457.489426] env[63297]: DEBUG nova.compute.manager [None req-53d7579a-5599-4d91-887a-420a5089ab0c tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1457.491045] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5188dfa8-7c03-4892-b013-f4f64e27e2f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.861816] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697590, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.013245] env[63297]: DEBUG oslo_concurrency.lockutils [None req-53d7579a-5599-4d91-887a-420a5089ab0c tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.575s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.037378] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1286eb-efa0-4a6c-b6ea-219addb02a91 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.048388] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc32396-7203-489b-bf3b-092b5a2bfdba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.080154] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b6e3e5-dc03-4551-bd72-3520de979e2b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.087898] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2b6de5-2963-480b-93ae-dfc63b679184 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.102540] env[63297]: DEBUG nova.compute.provider_tree [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1458.182162] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "c05a249e-ab88-41f0-81f5-b644b3da5d2d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.182782] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "c05a249e-ab88-41f0-81f5-b644b3da5d2d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.182782] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "c05a249e-ab88-41f0-81f5-b644b3da5d2d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.182934] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "c05a249e-ab88-41f0-81f5-b644b3da5d2d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.183082] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "c05a249e-ab88-41f0-81f5-b644b3da5d2d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.185371] env[63297]: INFO nova.compute.manager [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Terminating instance [ 1458.188659] env[63297]: DEBUG nova.compute.manager [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1458.188910] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1458.190053] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b89a40d-ef00-4d6e-9d3a-0e8f2bf0494b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.199347] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1458.200805] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cecfa990-4399-43cc-94bc-1322fbe45c99 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.207108] env[63297]: DEBUG oslo_vmware.api [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1458.207108] env[63297]: value = "task-1697591" [ 1458.207108] env[63297]: _type = "Task" [ 1458.207108] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.216061] env[63297]: DEBUG oslo_vmware.api [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697591, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.357514] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697590, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615311} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.357514] env[63297]: INFO nova.virt.vmwareapi.ds_util [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 89c9cd40-585e-4ae6-88b3-1a33a94c3b52/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk. 
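The CopyVirtualDisk_Task above follows the same lifecycle as every vCenter task in this trace: the SOAP invocation returns a task handle, wait_for_task polls it (progress 0% → 51%), and _poll_task records the final duration_secs once the task reports success. Below is a minimal sketch of that poll-and-report loop; get_task_info() is a hypothetical callable standing in for the real oslo_vmware/vSphere session bindings, not the driver's actual API.

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, interval=0.5):
        """Poll a vCenter-style task until it finishes.

        get_task_info is a hypothetical callable returning a dict like
        {'state': 'running'|'success'|'error', 'progress': int, 'error': str};
        the real driver goes through oslo_vmware.api.VMwareAPISession instead.
        """
        start = time.monotonic()
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                # Corresponds to the "... completed successfully" line with duration_secs.
                return time.monotonic() - start
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            # Corresponds to the "... progress is N%" lines emitted on each poll.
            print(f"progress is {info.get('progress', 0)}%")
            time.sleep(interval)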
[ 1458.358384] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546d357c-4f00-4abb-90a0-bceda2d76424 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.386369] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 89c9cd40-585e-4ae6-88b3-1a33a94c3b52/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1458.386800] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85180b7d-5066-46cb-8d03-d2c876cc6077 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.405775] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1458.405775] env[63297]: value = "task-1697592" [ 1458.405775] env[63297]: _type = "Task" [ 1458.405775] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.414616] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697592, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.475311] env[63297]: DEBUG nova.compute.manager [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1458.509059] env[63297]: DEBUG nova.virt.hardware [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1458.509382] env[63297]: DEBUG nova.virt.hardware [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1458.509617] env[63297]: DEBUG nova.virt.hardware [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1458.509868] env[63297]: DEBUG nova.virt.hardware [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1458.510079] env[63297]: DEBUG nova.virt.hardware [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1458.510329] env[63297]: DEBUG nova.virt.hardware [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1458.510598] env[63297]: DEBUG nova.virt.hardware [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1458.510835] env[63297]: DEBUG nova.virt.hardware [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1458.511059] env[63297]: DEBUG 
nova.virt.hardware [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1458.511304] env[63297]: DEBUG nova.virt.hardware [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1458.511539] env[63297]: DEBUG nova.virt.hardware [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1458.512984] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f85dea-d3ed-49da-8af8-dbc611c2a449 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.526947] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa59a2c-2317-4eb1-bb28-ef63a5ec203a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.647099] env[63297]: DEBUG nova.scheduler.client.report [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1458.647508] env[63297]: DEBUG nova.compute.provider_tree [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 90 to 91 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1458.647851] env[63297]: DEBUG nova.compute.provider_tree [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1458.717805] env[63297]: DEBUG 
oslo_vmware.api [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697591, 'name': PowerOffVM_Task, 'duration_secs': 0.299953} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.718140] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1458.718333] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1458.718605] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65cda222-fa78-4c4e-836b-738404730daa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.918280] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697592, 'name': ReconfigVM_Task, 'duration_secs': 0.41909} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.918555] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 89c9cd40-585e-4ae6-88b3-1a33a94c3b52/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1458.919660] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607a3a44-1d72-40a5-8e89-a3fca01db65b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.950047] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a73ce552-c389-4186-a227-4b51d9da640a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.960785] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1458.961821] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1458.962142] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Deleting the datastore file [datastore1] c05a249e-ab88-41f0-81f5-b644b3da5d2d {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1458.962488] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6748e12f-0015-460c-b284-2c18b7e7bef8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.972063] env[63297]: DEBUG oslo_vmware.api [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1458.972063] env[63297]: value = "task-1697594" [ 1458.972063] env[63297]: _type = "Task" [ 1458.972063] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.972581] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1458.972581] env[63297]: value = "task-1697595" [ 1458.972581] env[63297]: _type = "Task" [ 1458.972581] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.993030] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697595, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.993030] env[63297]: DEBUG oslo_vmware.api [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697594, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.072880] env[63297]: DEBUG nova.objects.instance [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lazy-loading 'flavor' on Instance uuid 5124f7fb-1293-4964-98c4-426ecfce7d10 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1459.155992] env[63297]: DEBUG oslo_concurrency.lockutils [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.700s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.156277] env[63297]: INFO nova.compute.manager [None req-132b73be-170c-464d-838c-0a6cee18faa4 tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Successfully reverted task state from rebuilding on failure for instance. 
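The inventory payload repeated in the provider_tree and scheduler report entries (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400 with max_unit 180) is what Placement uses to size provider 88960333-a089-4255-ad72-5c02d57b2b35. Roughly, the schedulable capacity of each resource class is (total - reserved) * allocation_ratio, while max_unit caps what a single instance may request; a small worked sketch using the values from this log:

    # Inventory as logged for provider 88960333-a089-4255-ad72-5c02d57b2b35.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 180},
    }

    def capacity(record):
        # Overcommit-adjusted room Placement can allocate against.
        return (record['total'] - record['reserved']) * record['allocation_ratio']

    for rc, rec in inventory.items():
        print(f"{rc}: capacity={capacity(rec):.0f}, per-instance cap={rec['max_unit']}")
    # VCPU: capacity=192, per-instance cap=16
    # MEMORY_MB: capacity=196078, per-instance cap=65530
    # DISK_GB: capacity=400, per-instance cap=180

With allocation_ratio 4.0, the 48 physical VCPUs are presented as 192 schedulable units, which is why the generation-90-to-91 inventory update above changes nothing about physical host capacity, only the bookkeeping view.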
[ 1459.162977] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.146s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.166927] env[63297]: INFO nova.compute.claims [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1459.426167] env[63297]: DEBUG nova.compute.manager [req-f305e01f-c627-4e3b-9061-c4d526afee1a req-fd7ca8d6-709d-4f01-822d-76fb528d95a8 service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Received event network-vif-plugged-cc29c84c-0884-4feb-9a78-7098d11b28ab {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1459.426415] env[63297]: DEBUG oslo_concurrency.lockutils [req-f305e01f-c627-4e3b-9061-c4d526afee1a req-fd7ca8d6-709d-4f01-822d-76fb528d95a8 service nova] Acquiring lock "96265295-6b0c-4803-bb89-6166c9d3fc7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.426631] env[63297]: DEBUG oslo_concurrency.lockutils [req-f305e01f-c627-4e3b-9061-c4d526afee1a req-fd7ca8d6-709d-4f01-822d-76fb528d95a8 service nova] Lock "96265295-6b0c-4803-bb89-6166c9d3fc7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.427138] env[63297]: DEBUG oslo_concurrency.lockutils [req-f305e01f-c627-4e3b-9061-c4d526afee1a req-fd7ca8d6-709d-4f01-822d-76fb528d95a8 service nova] Lock "96265295-6b0c-4803-bb89-6166c9d3fc7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.427379] env[63297]: DEBUG nova.compute.manager [req-f305e01f-c627-4e3b-9061-c4d526afee1a req-fd7ca8d6-709d-4f01-822d-76fb528d95a8 service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] No waiting events found dispatching network-vif-plugged-cc29c84c-0884-4feb-9a78-7098d11b28ab {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1459.427566] env[63297]: WARNING nova.compute.manager [req-f305e01f-c627-4e3b-9061-c4d526afee1a req-fd7ca8d6-709d-4f01-822d-76fb528d95a8 service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Received unexpected event network-vif-plugged-cc29c84c-0884-4feb-9a78-7098d11b28ab for instance with vm_state building and task_state spawning. [ 1459.487237] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697595, 'name': ReconfigVM_Task, 'duration_secs': 0.226967} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.491096] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1459.491814] env[63297]: DEBUG oslo_vmware.api [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697594, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.229523} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.492077] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7400199-c190-48a1-9c84-309429a18744 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.493922] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1459.494149] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1459.494444] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1459.494676] env[63297]: INFO nova.compute.manager [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1459.494954] env[63297]: DEBUG oslo.service.loopingcall [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.495468] env[63297]: DEBUG nova.compute.manager [-] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1459.495577] env[63297]: DEBUG nova.network.neutron [-] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1459.503983] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1459.503983] env[63297]: value = "task-1697596" [ 1459.503983] env[63297]: _type = "Task" [ 1459.503983] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.512493] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697596, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.578041] env[63297]: DEBUG oslo_concurrency.lockutils [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.578210] env[63297]: DEBUG oslo_concurrency.lockutils [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquired lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.578333] env[63297]: DEBUG nova.network.neutron [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1459.578505] env[63297]: DEBUG nova.objects.instance [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lazy-loading 'info_cache' on Instance uuid 5124f7fb-1293-4964-98c4-426ecfce7d10 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1459.796120] env[63297]: DEBUG nova.network.neutron [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Successfully updated port: cc29c84c-0884-4feb-9a78-7098d11b28ab {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1459.960019] env[63297]: DEBUG nova.compute.manager [req-f172a033-f9f3-4066-8285-921cb48f93cc req-adc1bc30-926e-47d6-84f1-14dd67642f0b service nova] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Received event network-vif-deleted-f15b3b92-ed83-4487-b142-801fa9f72581 {{(pid=63297) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11132}} [ 1459.960453] env[63297]: INFO nova.compute.manager [req-f172a033-f9f3-4066-8285-921cb48f93cc req-adc1bc30-926e-47d6-84f1-14dd67642f0b service nova] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Neutron deleted interface f15b3b92-ed83-4487-b142-801fa9f72581; detaching it from the instance and deleting it from the info cache [ 1459.960633] env[63297]: DEBUG nova.network.neutron [req-f172a033-f9f3-4066-8285-921cb48f93cc req-adc1bc30-926e-47d6-84f1-14dd67642f0b service nova] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.015979] env[63297]: DEBUG oslo_vmware.api [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697596, 'name': PowerOnVM_Task, 'duration_secs': 0.476147} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.016299] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1460.019091] env[63297]: DEBUG nova.compute.manager [None req-a48c3756-825b-45be-a268-eef630fdc51c tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1460.019934] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf3ce93-0087-4f2a-97fb-cbf03f3cb288 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.082275] env[63297]: DEBUG nova.objects.base [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Object Instance<5124f7fb-1293-4964-98c4-426ecfce7d10> lazy-loaded attributes: flavor,info_cache {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1460.299844] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "refresh_cache-96265295-6b0c-4803-bb89-6166c9d3fc7f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.300012] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired lock "refresh_cache-96265295-6b0c-4803-bb89-6166c9d3fc7f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.300180] env[63297]: DEBUG nova.network.neutron [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Building network info cache for instance {{(pid=63297) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1460.304050] env[63297]: DEBUG nova.network.neutron [-] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.465869] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5ce7d2c-b971-4905-bfee-4954a805fcec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.477697] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012bd971-bc05-48d6-9c79-cfa875cfa3fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.526052] env[63297]: DEBUG nova.compute.manager [req-f172a033-f9f3-4066-8285-921cb48f93cc req-adc1bc30-926e-47d6-84f1-14dd67642f0b service nova] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Detach interface failed, port_id=f15b3b92-ed83-4487-b142-801fa9f72581, reason: Instance c05a249e-ab88-41f0-81f5-b644b3da5d2d could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1460.666320] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688c77c3-c96d-48c7-a86a-7fb38f25d2c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.679102] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992599bb-8a3a-4683-9cb7-2445b6daed3f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.718877] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6395a723-9235-441f-8117-f7acf1097099 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.731276] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df1faaf-daba-4f1d-9219-bfb0f0cfc02b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.749186] env[63297]: DEBUG nova.compute.provider_tree [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1460.806814] env[63297]: INFO nova.compute.manager [-] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Took 1.31 seconds to deallocate network for instance. [ 1460.865564] env[63297]: DEBUG nova.network.neutron [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1460.947208] env[63297]: DEBUG nova.network.neutron [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Updating instance_info_cache with network_info: [{"id": "1b123801-2747-40a9-84bc-ae5dc9595556", "address": "fa:16:3e:42:cf:1d", "network": {"id": "437d4b1d-796c-43d8-8258-df0e6b4e36d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-940883115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731a719bb2a44a53985d10e02f9397cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b123801-27", "ovs_interfaceid": "1b123801-2747-40a9-84bc-ae5dc9595556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.055737] env[63297]: DEBUG nova.network.neutron [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Updating instance_info_cache with network_info: [{"id": "cc29c84c-0884-4feb-9a78-7098d11b28ab", "address": "fa:16:3e:07:db:9a", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc29c84c-08", "ovs_interfaceid": "cc29c84c-0884-4feb-9a78-7098d11b28ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.253547] env[63297]: DEBUG nova.scheduler.client.report [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed for provider 
88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1461.315571] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.451858] env[63297]: DEBUG oslo_concurrency.lockutils [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Releasing lock "refresh_cache-5124f7fb-1293-4964-98c4-426ecfce7d10" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.455604] env[63297]: DEBUG nova.compute.manager [req-f5b774b4-8818-4bcf-8c0d-af6454bf7b6d req-2ccaed96-321c-46a9-ab8a-158061e97b7b service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Received event network-changed-cc29c84c-0884-4feb-9a78-7098d11b28ab {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1461.456158] env[63297]: DEBUG nova.compute.manager [req-f5b774b4-8818-4bcf-8c0d-af6454bf7b6d req-2ccaed96-321c-46a9-ab8a-158061e97b7b service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Refreshing instance network info cache due to event network-changed-cc29c84c-0884-4feb-9a78-7098d11b28ab. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1461.456252] env[63297]: DEBUG oslo_concurrency.lockutils [req-f5b774b4-8818-4bcf-8c0d-af6454bf7b6d req-2ccaed96-321c-46a9-ab8a-158061e97b7b service nova] Acquiring lock "refresh_cache-96265295-6b0c-4803-bb89-6166c9d3fc7f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.559131] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Releasing lock "refresh_cache-96265295-6b0c-4803-bb89-6166c9d3fc7f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.559131] env[63297]: DEBUG nova.compute.manager [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Instance network_info: |[{"id": "cc29c84c-0884-4feb-9a78-7098d11b28ab", "address": "fa:16:3e:07:db:9a", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc29c84c-08", "ovs_interfaceid": "cc29c84c-0884-4feb-9a78-7098d11b28ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1461.559518] env[63297]: DEBUG oslo_concurrency.lockutils [req-f5b774b4-8818-4bcf-8c0d-af6454bf7b6d req-2ccaed96-321c-46a9-ab8a-158061e97b7b service nova] Acquired lock "refresh_cache-96265295-6b0c-4803-bb89-6166c9d3fc7f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.559735] env[63297]: DEBUG nova.network.neutron [req-f5b774b4-8818-4bcf-8c0d-af6454bf7b6d req-2ccaed96-321c-46a9-ab8a-158061e97b7b service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Refreshing network info cache for port cc29c84c-0884-4feb-9a78-7098d11b28ab {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1461.562488] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:db:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'cc29c84c-0884-4feb-9a78-7098d11b28ab', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1461.570949] env[63297]: DEBUG oslo.service.loopingcall [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1461.572070] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1461.573350] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de56feeb-6a47-43ba-9b65-3bc82e14fc27 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.594432] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1461.594432] env[63297]: value = "task-1697597" [ 1461.594432] env[63297]: _type = "Task" [ 1461.594432] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.603956] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697597, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.758412] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.759263] env[63297]: DEBUG nova.compute.manager [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1461.762895] env[63297]: DEBUG oslo_concurrency.lockutils [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.670s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.763234] env[63297]: DEBUG nova.objects.instance [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lazy-loading 'resources' on Instance uuid c147f97d-7fae-4364-a9c0-04978df2450f {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1461.907362] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "5914b3ce-f40f-4782-b56a-9fc29c819938" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.907362] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "5914b3ce-f40f-4782-b56a-9fc29c819938" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.958248] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1461.958805] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b379763-a212-4f54-91de-074c7fc67066 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.969024] env[63297]: DEBUG oslo_vmware.api [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1461.969024] env[63297]: value = "task-1697598" [ 1461.969024] env[63297]: _type = "Task" [ 1461.969024] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.979763] env[63297]: DEBUG oslo_vmware.api [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697598, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.106820] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697597, 'name': CreateVM_Task, 'duration_secs': 0.458165} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.107051] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1462.107638] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1462.107787] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.108143] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1462.110626] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edcf6424-3cea-46a7-9ac6-939c9b69280e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.117325] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1462.117325] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]521c4a3f-3623-f694-dfeb-e2133217c31f" [ 1462.117325] env[63297]: _type = "Task" [ 1462.117325] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.128414] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521c4a3f-3623-f694-dfeb-e2133217c31f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.267199] env[63297]: DEBUG nova.compute.utils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1462.268825] env[63297]: DEBUG nova.compute.manager [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1462.269565] env[63297]: DEBUG nova.network.neutron [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1462.342917] env[63297]: DEBUG nova.policy [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43ab498375eb47a3923ac10343c11d34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d5cb4b4799b4b8b99648e718dbc0254', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1462.376030] env[63297]: DEBUG nova.network.neutron [req-f5b774b4-8818-4bcf-8c0d-af6454bf7b6d req-2ccaed96-321c-46a9-ab8a-158061e97b7b service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Updated VIF entry in instance network info cache for port cc29c84c-0884-4feb-9a78-7098d11b28ab. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1462.376401] env[63297]: DEBUG nova.network.neutron [req-f5b774b4-8818-4bcf-8c0d-af6454bf7b6d req-2ccaed96-321c-46a9-ab8a-158061e97b7b service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Updating instance_info_cache with network_info: [{"id": "cc29c84c-0884-4feb-9a78-7098d11b28ab", "address": "fa:16:3e:07:db:9a", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc29c84c-08", "ovs_interfaceid": "cc29c84c-0884-4feb-9a78-7098d11b28ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.409062] env[63297]: DEBUG nova.compute.manager [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1462.485069] env[63297]: DEBUG oslo_vmware.api [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697598, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.631805] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521c4a3f-3623-f694-dfeb-e2133217c31f, 'name': SearchDatastore_Task, 'duration_secs': 0.013104} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.632255] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.632456] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1462.632700] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1462.632839] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.634314] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1462.634372] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-464a5ca3-8262-4522-9591-3e531ba94163 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.648938] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1462.649166] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1462.652802] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb28a448-4ef3-4327-9c24-292174f3efd3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.661155] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1462.661155] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528c2507-bcc9-4eb6-0b1b-ac6fba44c12e" [ 1462.661155] env[63297]: _type = "Task" [ 1462.661155] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.668754] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528c2507-bcc9-4eb6-0b1b-ac6fba44c12e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.753573] env[63297]: DEBUG nova.network.neutron [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Successfully created port: fb29440a-5455-4b45-b672-3aa307f31cf0 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1462.773624] env[63297]: DEBUG oslo_vmware.rw_handles [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52958274-745d-40c2-1d32-f53fd55899b1/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1462.774708] env[63297]: DEBUG nova.compute.manager [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1462.778954] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c61d39a-900e-4065-b155-672a88c7d731 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.791117] env[63297]: DEBUG oslo_vmware.rw_handles [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52958274-745d-40c2-1d32-f53fd55899b1/disk-0.vmdk is in state: ready. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1462.793384] env[63297]: ERROR oslo_vmware.rw_handles [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52958274-745d-40c2-1d32-f53fd55899b1/disk-0.vmdk due to incomplete transfer. [ 1462.793384] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-11bbb537-3ac8-4ec2-a299-46cc3c7cb956 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.800073] env[63297]: DEBUG oslo_vmware.rw_handles [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52958274-745d-40c2-1d32-f53fd55899b1/disk-0.vmdk. {{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1462.800309] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Uploaded image dd69ed84-813a-498d-a635-ba1ad9182458 to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1462.802831] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1462.803162] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-90867677-f280-42ee-9613-cd6ac015ccd5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.814308] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1462.814308] env[63297]: value = "task-1697599" [ 1462.814308] env[63297]: _type = "Task" [ 1462.814308] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.823728] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697599, 'name': Destroy_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.826779] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60411441-af1c-4595-9414-dc184a6f86f9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.836139] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d84067-7742-4a22-8c8b-f25dd9618f3a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.875077] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f73999-99e5-4e23-8023-8f748ce8459f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.878882] env[63297]: DEBUG oslo_concurrency.lockutils [req-f5b774b4-8818-4bcf-8c0d-af6454bf7b6d req-2ccaed96-321c-46a9-ab8a-158061e97b7b service nova] Releasing lock "refresh_cache-96265295-6b0c-4803-bb89-6166c9d3fc7f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.884491] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d939df-0964-4974-8884-c809df05cfcc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.900928] env[63297]: DEBUG nova.compute.provider_tree [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1462.933883] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.979997] env[63297]: DEBUG oslo_vmware.api [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697598, 'name': PowerOnVM_Task, 'duration_secs': 0.682761} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.980336] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1462.980525] env[63297]: DEBUG nova.compute.manager [None req-83a658b2-6573-4f18-8cb9-7a49d5ee5dbd tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1462.981344] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23112e0-7bf8-433b-9d5c-0d84a6b8f887 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.172232] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528c2507-bcc9-4eb6-0b1b-ac6fba44c12e, 'name': SearchDatastore_Task, 'duration_secs': 0.022949} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.173106] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfb23f5b-4733-4c65-a91e-9591d0e3a648 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.179851] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1463.179851] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d6bd2-37c6-86d0-c75b-fc4bd3bee714" [ 1463.179851] env[63297]: _type = "Task" [ 1463.179851] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.188598] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d6bd2-37c6-86d0-c75b-fc4bd3bee714, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.326545] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697599, 'name': Destroy_Task} progress is 33%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.404980] env[63297]: DEBUG nova.scheduler.client.report [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1463.691209] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d6bd2-37c6-86d0-c75b-fc4bd3bee714, 'name': SearchDatastore_Task, 'duration_secs': 0.053971} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.695031] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.695031] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 96265295-6b0c-4803-bb89-6166c9d3fc7f/96265295-6b0c-4803-bb89-6166c9d3fc7f.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1463.695031] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d30b020a-a1ea-4c53-b46b-40c6be86b3dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.700501] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1463.700501] env[63297]: value = "task-1697600" [ 1463.700501] env[63297]: _type = "Task" [ 1463.700501] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.710908] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697600, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.787993] env[63297]: DEBUG nova.compute.manager [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1463.813255] env[63297]: DEBUG nova.virt.hardware [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1463.813518] env[63297]: DEBUG nova.virt.hardware [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1463.813675] env[63297]: DEBUG nova.virt.hardware [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1463.814076] env[63297]: DEBUG nova.virt.hardware [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1463.814076] env[63297]: DEBUG nova.virt.hardware [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1463.814183] env[63297]: DEBUG nova.virt.hardware [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1463.814352] env[63297]: DEBUG nova.virt.hardware [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1463.814508] env[63297]: DEBUG nova.virt.hardware [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 
tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1463.814670] env[63297]: DEBUG nova.virt.hardware [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1463.814828] env[63297]: DEBUG nova.virt.hardware [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1463.814996] env[63297]: DEBUG nova.virt.hardware [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1463.815895] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2114d9-3f65-4b7e-a8ad-a64224d706aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.831177] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f440ece-a519-4e9b-9220-7ee8b379abc2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.835101] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697599, 'name': Destroy_Task, 'duration_secs': 0.839414} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.835358] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Destroyed the VM [ 1463.835586] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1463.836138] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0df6c069-c7c6-4d7e-8f4b-6b1beeaec210 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.847261] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1463.847261] env[63297]: value = "task-1697601" [ 1463.847261] env[63297]: _type = "Task" [ 1463.847261] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.856380] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697601, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.911504] env[63297]: DEBUG oslo_concurrency.lockutils [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.149s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.913969] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.522s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.914613] env[63297]: DEBUG nova.objects.instance [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lazy-loading 'resources' on Instance uuid fb33135a-073d-4d80-9833-5b29afae1cc6 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1463.937990] env[63297]: INFO nova.scheduler.client.report [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Deleted allocations for instance c147f97d-7fae-4364-a9c0-04978df2450f [ 1464.213717] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697600, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.360297] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697601, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.447616] env[63297]: DEBUG oslo_concurrency.lockutils [None req-258da812-48e7-4878-b6da-49ff8ab6f4e6 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "c147f97d-7fae-4364-a9c0-04978df2450f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.574s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.494773] env[63297]: DEBUG nova.compute.manager [req-a5af7077-af7f-402c-bd2b-fab486cbbb55 req-819daad4-b4d1-4a2e-adac-0a6c4061a8bf service nova] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Received event network-vif-plugged-fb29440a-5455-4b45-b672-3aa307f31cf0 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1464.494773] env[63297]: DEBUG oslo_concurrency.lockutils [req-a5af7077-af7f-402c-bd2b-fab486cbbb55 req-819daad4-b4d1-4a2e-adac-0a6c4061a8bf service nova] Acquiring lock "71faf167-dfe3-4792-9841-b5ab4b333884-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.494773] env[63297]: DEBUG oslo_concurrency.lockutils [req-a5af7077-af7f-402c-bd2b-fab486cbbb55 req-819daad4-b4d1-4a2e-adac-0a6c4061a8bf service nova] Lock "71faf167-dfe3-4792-9841-b5ab4b333884-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.494773] env[63297]: DEBUG oslo_concurrency.lockutils [req-a5af7077-af7f-402c-bd2b-fab486cbbb55 req-819daad4-b4d1-4a2e-adac-0a6c4061a8bf service nova] Lock "71faf167-dfe3-4792-9841-b5ab4b333884-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.494773] env[63297]: DEBUG nova.compute.manager [req-a5af7077-af7f-402c-bd2b-fab486cbbb55 req-819daad4-b4d1-4a2e-adac-0a6c4061a8bf service nova] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] No waiting events found dispatching network-vif-plugged-fb29440a-5455-4b45-b672-3aa307f31cf0 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1464.494773] env[63297]: WARNING nova.compute.manager [req-a5af7077-af7f-402c-bd2b-fab486cbbb55 req-819daad4-b4d1-4a2e-adac-0a6c4061a8bf service nova] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Received unexpected event network-vif-plugged-fb29440a-5455-4b45-b672-3aa307f31cf0 for instance with vm_state building and task_state spawning. [ 1464.516433] env[63297]: DEBUG nova.network.neutron [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Successfully updated port: fb29440a-5455-4b45-b672-3aa307f31cf0 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1464.713695] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697600, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.734164} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.716863] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 96265295-6b0c-4803-bb89-6166c9d3fc7f/96265295-6b0c-4803-bb89-6166c9d3fc7f.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1464.717183] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1464.717738] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f8a3c253-feb5-4624-b9cd-c21da1039edd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.731369] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1464.731369] env[63297]: value = "task-1697602" [ 1464.731369] env[63297]: _type = "Task" [ 1464.731369] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.745357] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697602, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.842957] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d49e812-f461-4370-bcc2-265b3cfd9850 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.854804] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37439bab-50a2-4bcc-b99c-afaa8a55267f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.863823] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697601, 'name': RemoveSnapshot_Task, 'duration_secs': 0.911554} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.864568] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1464.864982] env[63297]: DEBUG nova.compute.manager [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1464.865778] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a5faab-a7ed-45ee-ad28-78c4bc69cc21 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.894414] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674861be-6069-43da-a80e-ef07c25864bc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.906728] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46bedc9e-bd94-4f87-aa62-a2d694a9fa7d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.921552] env[63297]: DEBUG nova.compute.provider_tree [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1465.020682] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "refresh_cache-71faf167-dfe3-4792-9841-b5ab4b333884" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.020682] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "refresh_cache-71faf167-dfe3-4792-9841-b5ab4b333884" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.020820] env[63297]: DEBUG nova.network.neutron [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1465.245424] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072282} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.245717] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1465.246517] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a94e02-9904-4b09-81a0-9758ceb7565c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.272087] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 96265295-6b0c-4803-bb89-6166c9d3fc7f/96265295-6b0c-4803-bb89-6166c9d3fc7f.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1465.272388] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32aa5409-c683-42bd-8f59-06e18e1eac39 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.296338] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1465.296338] env[63297]: value = "task-1697603" [ 1465.296338] env[63297]: _type = "Task" [ 1465.296338] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.305221] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697603, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.405102] env[63297]: INFO nova.compute.manager [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Shelve offloading [ 1465.408068] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1465.408380] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18bf3f53-b3c0-4f66-88b6-35234ea38724 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.417699] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1465.417699] env[63297]: value = "task-1697604" [ 1465.417699] env[63297]: _type = "Task" [ 1465.417699] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.430732] env[63297]: DEBUG nova.scheduler.client.report [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1465.437331] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1465.437331] env[63297]: DEBUG nova.compute.manager [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1465.437331] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9702ab62-07c0-4bf8-9a64-92890529ded2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.445172] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.445344] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.445542] env[63297]: DEBUG nova.network.neutron [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1465.554375] env[63297]: DEBUG nova.network.neutron [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1465.706731] env[63297]: DEBUG nova.network.neutron [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Updating instance_info_cache with network_info: [{"id": "fb29440a-5455-4b45-b672-3aa307f31cf0", "address": "fa:16:3e:fb:9d:75", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb29440a-54", "ovs_interfaceid": "fb29440a-5455-4b45-b672-3aa307f31cf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.806898] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697603, 'name': ReconfigVM_Task, 'duration_secs': 0.27491} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.807202] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 96265295-6b0c-4803-bb89-6166c9d3fc7f/96265295-6b0c-4803-bb89-6166c9d3fc7f.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1465.807841] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-007f369d-52a1-4ea4-b163-29f86203af69 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.817792] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1465.817792] env[63297]: value = "task-1697605" [ 1465.817792] env[63297]: _type = "Task" [ 1465.817792] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.825732] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697605, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.937159] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.022s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.939835] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.384s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.939835] env[63297]: DEBUG nova.objects.instance [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lazy-loading 'resources' on Instance uuid c83c23d9-a8ec-4a87-8a8c-067e18d2615a {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1465.959613] env[63297]: INFO nova.scheduler.client.report [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Deleted allocations for instance fb33135a-073d-4d80-9833-5b29afae1cc6 [ 1466.209374] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "refresh_cache-71faf167-dfe3-4792-9841-b5ab4b333884" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1466.209704] env[63297]: DEBUG nova.compute.manager [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Instance network_info: |[{"id": "fb29440a-5455-4b45-b672-3aa307f31cf0", "address": "fa:16:3e:fb:9d:75", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb29440a-54", "ovs_interfaceid": "fb29440a-5455-4b45-b672-3aa307f31cf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1466.210304] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:9d:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb29440a-5455-4b45-b672-3aa307f31cf0', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1466.220293] env[63297]: DEBUG oslo.service.loopingcall [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1466.220556] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1466.222181] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8da0c0a9-dcd8-4a83-b706-40a2c5678d1e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.248960] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1466.248960] env[63297]: value = "task-1697606" [ 1466.248960] env[63297]: _type = "Task" [ 1466.248960] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.253767] env[63297]: DEBUG nova.network.neutron [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updating instance_info_cache with network_info: [{"id": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "address": "fa:16:3e:13:0a:af", "network": {"id": "77c68484-41bb-4cce-bb80-50e08cc5af11", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-795079396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf40bf7f33349cb8bb098887d1244ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd69c1e-7e", "ovs_interfaceid": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.261045] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697606, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.329583] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697605, 'name': Rename_Task, 'duration_secs': 0.144442} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.330394] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1466.330667] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92590d44-4db1-4265-b085-a6745962a0d2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.338585] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1466.338585] env[63297]: value = "task-1697607" [ 1466.338585] env[63297]: _type = "Task" [ 1466.338585] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.348068] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697607, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.471018] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aba52549-bb1c-4cf4-8d4c-1b0ee4acf36e tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "fb33135a-073d-4d80-9833-5b29afae1cc6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.885s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.595661] env[63297]: DEBUG nova.compute.manager [req-9fb42a32-5500-459a-8619-2849c1ce1e10 req-77f8b980-9603-49cf-94c7-d4e2ea067fb3 service nova] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Received event network-changed-fb29440a-5455-4b45-b672-3aa307f31cf0 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1466.595661] env[63297]: DEBUG nova.compute.manager [req-9fb42a32-5500-459a-8619-2849c1ce1e10 req-77f8b980-9603-49cf-94c7-d4e2ea067fb3 service nova] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Refreshing instance network info cache due to event network-changed-fb29440a-5455-4b45-b672-3aa307f31cf0. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1466.595661] env[63297]: DEBUG oslo_concurrency.lockutils [req-9fb42a32-5500-459a-8619-2849c1ce1e10 req-77f8b980-9603-49cf-94c7-d4e2ea067fb3 service nova] Acquiring lock "refresh_cache-71faf167-dfe3-4792-9841-b5ab4b333884" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1466.595661] env[63297]: DEBUG oslo_concurrency.lockutils [req-9fb42a32-5500-459a-8619-2849c1ce1e10 req-77f8b980-9603-49cf-94c7-d4e2ea067fb3 service nova] Acquired lock "refresh_cache-71faf167-dfe3-4792-9841-b5ab4b333884" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.595883] env[63297]: DEBUG nova.network.neutron [req-9fb42a32-5500-459a-8619-2849c1ce1e10 req-77f8b980-9603-49cf-94c7-d4e2ea067fb3 service nova] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Refreshing network info cache for port fb29440a-5455-4b45-b672-3aa307f31cf0 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1466.757315] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Releasing lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1466.767177] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697606, 'name': CreateVM_Task, 'duration_secs': 0.381119} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.767360] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1466.767993] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1466.768175] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.768492] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1466.768745] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb62b2c6-db4a-4184-9750-a6f5059fcea5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.774075] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1466.774075] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529bda83-046f-e79e-e6f0-25842a3899c9" [ 1466.774075] env[63297]: _type = "Task" [ 1466.774075] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.788921] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529bda83-046f-e79e-e6f0-25842a3899c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.853525] env[63297]: DEBUG oslo_vmware.api [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1697607, 'name': PowerOnVM_Task, 'duration_secs': 0.504356} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.853816] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1466.854027] env[63297]: INFO nova.compute.manager [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Took 8.38 seconds to spawn the instance on the hypervisor. [ 1466.854244] env[63297]: DEBUG nova.compute.manager [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1466.854997] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84f5e25-b102-4e93-8000-c8f2e0b8fed5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.889038] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e8f017-11e3-40f4-a5d5-7a55a7594934 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.897803] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8395605-d554-404d-bd5a-89e63cb26457 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.933782] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec4eb84-d6bc-47a5-9ae6-4bd3a770726b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.945560] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6642f39-b0db-4e6c-a047-42caf27a68e7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.960054] env[63297]: DEBUG nova.compute.provider_tree [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1467.067704] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1467.068650] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56859bb4-01b0-45cf-8a0d-11d6e5b905d8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.077707] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None 
req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1467.078094] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a802036-dbc6-41fd-8660-8c83ec5f306a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.155970] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1467.156294] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1467.156577] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Deleting the datastore file [datastore1] fba9040d-f904-44a1-8785-14d4696ea939 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1467.156748] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ac8b432-b4ec-4c22-87b3-5b1828174e93 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.164796] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1467.164796] env[63297]: value = "task-1697609" [ 1467.164796] env[63297]: _type = "Task" [ 1467.164796] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.173718] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697609, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.185371] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "dc196e68-b725-43a1-9848-e84d1b138245" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.185661] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "dc196e68-b725-43a1-9848-e84d1b138245" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.185884] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "dc196e68-b725-43a1-9848-e84d1b138245-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.186103] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "dc196e68-b725-43a1-9848-e84d1b138245-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.186339] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "dc196e68-b725-43a1-9848-e84d1b138245-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.188442] env[63297]: INFO nova.compute.manager [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Terminating instance [ 1467.190087] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "refresh_cache-dc196e68-b725-43a1-9848-e84d1b138245" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1467.190687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquired lock "refresh_cache-dc196e68-b725-43a1-9848-e84d1b138245" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.190687] env[63297]: DEBUG nova.network.neutron [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 
tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1467.285649] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529bda83-046f-e79e-e6f0-25842a3899c9, 'name': SearchDatastore_Task, 'duration_secs': 0.018643} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.286026] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1467.286307] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1467.286582] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1467.286765] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.286984] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1467.287299] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b63cee90-92ba-4454-900e-bff431655ad9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.297455] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1467.297762] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1467.298641] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e6e1bd4-39fc-487f-b244-9fef4e9d9c03 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.305289] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1467.305289] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a451a3-a9b5-bf78-c232-bcca630dabc3" [ 1467.305289] env[63297]: _type = "Task" [ 1467.305289] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.314706] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a451a3-a9b5-bf78-c232-bcca630dabc3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.375855] env[63297]: INFO nova.compute.manager [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Took 57.15 seconds to build instance. [ 1467.466025] env[63297]: DEBUG nova.scheduler.client.report [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1467.638201] env[63297]: DEBUG nova.network.neutron [req-9fb42a32-5500-459a-8619-2849c1ce1e10 req-77f8b980-9603-49cf-94c7-d4e2ea067fb3 service nova] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Updated VIF entry in instance network info cache for port fb29440a-5455-4b45-b672-3aa307f31cf0. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1467.638644] env[63297]: DEBUG nova.network.neutron [req-9fb42a32-5500-459a-8619-2849c1ce1e10 req-77f8b980-9603-49cf-94c7-d4e2ea067fb3 service nova] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Updating instance_info_cache with network_info: [{"id": "fb29440a-5455-4b45-b672-3aa307f31cf0", "address": "fa:16:3e:fb:9d:75", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb29440a-54", "ovs_interfaceid": "fb29440a-5455-4b45-b672-3aa307f31cf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.675618] env[63297]: DEBUG oslo_vmware.api [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697609, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.348728} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.676236] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1467.676236] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1467.676565] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1467.706387] env[63297]: INFO nova.scheduler.client.report [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Deleted allocations for instance fba9040d-f904-44a1-8785-14d4696ea939 [ 1467.721601] env[63297]: DEBUG nova.network.neutron [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1467.816913] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a451a3-a9b5-bf78-c232-bcca630dabc3, 'name': SearchDatastore_Task, 'duration_secs': 0.010439} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.817797] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb12b4b0-b1a6-4536-9957-d2a8fb4d07a8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.820897] env[63297]: DEBUG nova.network.neutron [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.823334] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1467.823334] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c023e0-7d35-906c-063f-a5ef10cf8595" [ 1467.823334] env[63297]: _type = "Task" [ 1467.823334] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.832678] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c023e0-7d35-906c-063f-a5ef10cf8595, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.879208] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6b9583d1-b03d-4138-bc85-a5444aa92302 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "96265295-6b0c-4803-bb89-6166c9d3fc7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.657s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.969925] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.031s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.973475] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 51.027s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.973857] env[63297]: DEBUG nova.objects.instance [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lazy-loading 'resources' on Instance uuid a57c0638-e14b-4474-a6b4-7184d7e2a0fe {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1467.990132] env[63297]: INFO nova.scheduler.client.report [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Deleted allocations for instance c83c23d9-a8ec-4a87-8a8c-067e18d2615a [ 1468.051302] env[63297]: DEBUG nova.compute.manager [req-8826b362-5111-4955-a783-ad6f04ae2bde req-e23f1a0d-4e5b-4d30-87f8-365b09169253 service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Received event network-changed-cc29c84c-0884-4feb-9a78-7098d11b28ab {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1468.051302] env[63297]: DEBUG nova.compute.manager [req-8826b362-5111-4955-a783-ad6f04ae2bde req-e23f1a0d-4e5b-4d30-87f8-365b09169253 service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Refreshing instance network info cache due to event network-changed-cc29c84c-0884-4feb-9a78-7098d11b28ab. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1468.051418] env[63297]: DEBUG oslo_concurrency.lockutils [req-8826b362-5111-4955-a783-ad6f04ae2bde req-e23f1a0d-4e5b-4d30-87f8-365b09169253 service nova] Acquiring lock "refresh_cache-96265295-6b0c-4803-bb89-6166c9d3fc7f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1468.051700] env[63297]: DEBUG oslo_concurrency.lockutils [req-8826b362-5111-4955-a783-ad6f04ae2bde req-e23f1a0d-4e5b-4d30-87f8-365b09169253 service nova] Acquired lock "refresh_cache-96265295-6b0c-4803-bb89-6166c9d3fc7f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1468.051977] env[63297]: DEBUG nova.network.neutron [req-8826b362-5111-4955-a783-ad6f04ae2bde req-e23f1a0d-4e5b-4d30-87f8-365b09169253 service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Refreshing network info cache for port cc29c84c-0884-4feb-9a78-7098d11b28ab {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1468.141952] env[63297]: DEBUG oslo_concurrency.lockutils [req-9fb42a32-5500-459a-8619-2849c1ce1e10 req-77f8b980-9603-49cf-94c7-d4e2ea067fb3 service nova] Releasing lock "refresh_cache-71faf167-dfe3-4792-9841-b5ab4b333884" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.213469] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.324636] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Releasing lock "refresh_cache-dc196e68-b725-43a1-9848-e84d1b138245" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.325083] env[63297]: DEBUG nova.compute.manager [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1468.325293] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1468.326291] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c680d800-3091-458f-bd43-d6e6cedc9616 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.340042] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c023e0-7d35-906c-063f-a5ef10cf8595, 'name': SearchDatastore_Task, 'duration_secs': 0.018868} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.341511] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.341774] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 71faf167-dfe3-4792-9841-b5ab4b333884/71faf167-dfe3-4792-9841-b5ab4b333884.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1468.342077] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1468.342297] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e724fb9f-4ed2-46db-9ff7-41d19fbe8f56 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.344477] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d3c556e-2877-4ece-bbef-632f0a79c880 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.351422] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1468.351422] env[63297]: value = "task-1697610" [ 1468.351422] env[63297]: _type = "Task" [ 1468.351422] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.352654] env[63297]: DEBUG oslo_vmware.api [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1468.352654] env[63297]: value = "task-1697611" [ 1468.352654] env[63297]: _type = "Task" [ 1468.352654] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.364916] env[63297]: DEBUG oslo_vmware.api [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697611, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.368271] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697610, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.506404] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fcc4196-5c37-4929-9cef-c0823b800221 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "c83c23d9-a8ec-4a87-8a8c-067e18d2615a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.458s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.786356] env[63297]: DEBUG nova.compute.manager [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Received event network-vif-unplugged-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1468.786356] env[63297]: DEBUG oslo_concurrency.lockutils [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] Acquiring lock "fba9040d-f904-44a1-8785-14d4696ea939-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.786356] env[63297]: DEBUG oslo_concurrency.lockutils [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] Lock "fba9040d-f904-44a1-8785-14d4696ea939-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.786356] env[63297]: DEBUG oslo_concurrency.lockutils [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] Lock "fba9040d-f904-44a1-8785-14d4696ea939-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.786356] env[63297]: DEBUG nova.compute.manager [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] No waiting events found dispatching 
network-vif-unplugged-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1468.786356] env[63297]: WARNING nova.compute.manager [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Received unexpected event network-vif-unplugged-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 for instance with vm_state shelved_offloaded and task_state None. [ 1468.786356] env[63297]: DEBUG nova.compute.manager [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Received event network-changed-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1468.786356] env[63297]: DEBUG nova.compute.manager [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Refreshing instance network info cache due to event network-changed-1dd69c1e-7eee-4b1b-b4a7-421ab5477495. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1468.786356] env[63297]: DEBUG oslo_concurrency.lockutils [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] Acquiring lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1468.786356] env[63297]: DEBUG oslo_concurrency.lockutils [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] Acquired lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1468.786356] env[63297]: DEBUG nova.network.neutron [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Refreshing network info cache for port 1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1468.870064] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697610, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.876164] env[63297]: DEBUG oslo_vmware.api [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697611, 'name': PowerOffVM_Task, 'duration_secs': 0.305341} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.876653] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1468.876858] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1468.877186] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2caf9fe2-384c-455f-80a2-4a5628e5637a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.916120] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1468.916502] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1468.916864] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Deleting the datastore file [datastore1] dc196e68-b725-43a1-9848-e84d1b138245 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1468.917819] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ff52eac-7c67-4895-bb9e-26c341f45832 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.929652] env[63297]: DEBUG oslo_vmware.api [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for the task: (returnval){ [ 1468.929652] env[63297]: value = "task-1697613" [ 1468.929652] env[63297]: _type = "Task" [ 1468.929652] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.947038] env[63297]: DEBUG oslo_vmware.api [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697613, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.953193] env[63297]: DEBUG nova.network.neutron [req-8826b362-5111-4955-a783-ad6f04ae2bde req-e23f1a0d-4e5b-4d30-87f8-365b09169253 service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Updated VIF entry in instance network info cache for port cc29c84c-0884-4feb-9a78-7098d11b28ab. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1468.953760] env[63297]: DEBUG nova.network.neutron [req-8826b362-5111-4955-a783-ad6f04ae2bde req-e23f1a0d-4e5b-4d30-87f8-365b09169253 service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Updating instance_info_cache with network_info: [{"id": "cc29c84c-0884-4feb-9a78-7098d11b28ab", "address": "fa:16:3e:07:db:9a", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc29c84c-08", "ovs_interfaceid": "cc29c84c-0884-4feb-9a78-7098d11b28ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1469.099591] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2742d824-f771-4bd3-8b92-c34a2ea407ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.110634] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c281cec4-8a84-4c9e-9a3f-dff6fdd6408d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.144395] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b85dc6-4806-43c9-a458-fbe39d242a3d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.153182] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6180656e-90c6-47d7-a0f1-eb2b71e040ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.169136] env[63297]: DEBUG nova.compute.provider_tree [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1469.371095] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "fc54a008-eb2e-4b10-86ea-be7c82b93139" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.371394] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "fc54a008-eb2e-4b10-86ea-be7c82b93139" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.372712] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697610, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642434} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.375535] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 71faf167-dfe3-4792-9841-b5ab4b333884/71faf167-dfe3-4792-9841-b5ab4b333884.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1469.375835] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1469.377102] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5415c084-469d-4f21-97b4-75bf67c6f679 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.388672] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1469.388672] env[63297]: value = "task-1697614" [ 1469.388672] env[63297]: _type = "Task" [ 1469.388672] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.402378] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "4701073f-eeee-4f37-919a-4c53663ac15f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.402378] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "4701073f-eeee-4f37-919a-4c53663ac15f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.403638] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697614, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.444223] env[63297]: DEBUG oslo_vmware.api [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Task: {'id': task-1697613, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.384926} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.446680] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1469.447014] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1469.447180] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1469.447391] env[63297]: INFO nova.compute.manager [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1469.447686] env[63297]: DEBUG oslo.service.loopingcall [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1469.447919] env[63297]: DEBUG nova.compute.manager [-] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1469.448051] env[63297]: DEBUG nova.network.neutron [-] [instance: dc196e68-b725-43a1-9848-e84d1b138245] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1469.457748] env[63297]: DEBUG oslo_concurrency.lockutils [req-8826b362-5111-4955-a783-ad6f04ae2bde req-e23f1a0d-4e5b-4d30-87f8-365b09169253 service nova] Releasing lock "refresh_cache-96265295-6b0c-4803-bb89-6166c9d3fc7f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1469.466102] env[63297]: DEBUG nova.network.neutron [-] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1469.672303] env[63297]: DEBUG nova.scheduler.client.report [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1469.871453] env[63297]: DEBUG nova.network.neutron [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updated VIF entry in instance network info cache for port 1dd69c1e-7eee-4b1b-b4a7-421ab5477495. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1469.872453] env[63297]: DEBUG nova.network.neutron [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updating instance_info_cache with network_info: [{"id": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "address": "fa:16:3e:13:0a:af", "network": {"id": "77c68484-41bb-4cce-bb80-50e08cc5af11", "bridge": null, "label": "tempest-ServersNegativeTestJSON-795079396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf40bf7f33349cb8bb098887d1244ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap1dd69c1e-7e", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1469.876646] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1469.908648] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697614, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078729} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.908648] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1469.908833] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077b022a-d7e9-4078-b697-67b6b98d2270 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.911763] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1469.936920] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 71faf167-dfe3-4792-9841-b5ab4b333884/71faf167-dfe3-4792-9841-b5ab4b333884.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1469.939662] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b4aee5c-b8a2-4ed2-bade-f666542b8b49 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.966848] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1469.966848] env[63297]: value = "task-1697615" [ 1469.966848] env[63297]: _type = "Task" [ 1469.966848] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.970618] env[63297]: DEBUG nova.network.neutron [-] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1469.978624] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697615, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.179587] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.206s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.184290] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 51.408s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.184290] env[63297]: DEBUG nova.objects.instance [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63297) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1470.249381] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "fba9040d-f904-44a1-8785-14d4696ea939" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.378690] env[63297]: DEBUG oslo_concurrency.lockutils [req-a37ff742-f217-4a86-9650-d54b8a9e68b0 req-7043900c-34d4-4fed-b456-71c632b2b82e service nova] Releasing lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.401931] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.456280] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.474267] env[63297]: INFO nova.compute.manager [-] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Took 1.03 seconds to deallocate network for instance. [ 1470.478987] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697615, 'name': ReconfigVM_Task, 'duration_secs': 0.317814} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.479257] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 71faf167-dfe3-4792-9841-b5ab4b333884/71faf167-dfe3-4792-9841-b5ab4b333884.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1470.480085] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b068a5d-03b4-44f1-985e-dc417e25732a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.491167] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1470.491167] env[63297]: value = "task-1697616" [ 1470.491167] env[63297]: _type = "Task" [ 1470.491167] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.499766] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697616, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.706144] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0b63ce27-6dfb-4be6-890e-6d6ea3eef07a tempest-ServerActionsV293TestJSON-1489413875 tempest-ServerActionsV293TestJSON-1489413875-project-member] Lock "a57c0638-e14b-4474-a6b4-7184d7e2a0fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.594s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.987769] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.004345] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697616, 'name': Rename_Task, 'duration_secs': 0.150986} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.004435] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1471.004692] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fed2906f-edb2-4924-ae18-1756019d3ff7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.012716] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1471.012716] env[63297]: value = "task-1697617" [ 1471.012716] env[63297]: _type = "Task" [ 1471.012716] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.021572] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697617, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.194390] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3f263dad-76f4-42d3-976c-0415267028d1 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.195968] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 50.700s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.195968] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.195968] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1471.196224] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 49.177s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.197800] env[63297]: INFO nova.compute.claims [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 
tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1471.201475] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c806c7-2373-4527-9841-95069f3ac465 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.214023] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095a4a6e-c383-4307-b774-d3d193c7da6e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.228772] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745572e9-0d6e-42b4-95da-5d12d0207ddb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.238720] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a677fdb-636a-4324-b04d-6726a0e1ab1a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.276628] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178577MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1471.276758] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.281172] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "42d872d6-da12-474b-8741-1d991d507cfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.281410] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "42d872d6-da12-474b-8741-1d991d507cfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.527043] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697617, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.589589] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.589883] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.784144] env[63297]: DEBUG nova.compute.manager [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1472.023242] env[63297]: DEBUG oslo_vmware.api [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697617, 'name': PowerOnVM_Task, 'duration_secs': 0.735307} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.023507] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1472.023708] env[63297]: INFO nova.compute.manager [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Took 8.24 seconds to spawn the instance on the hypervisor. [ 1472.023881] env[63297]: DEBUG nova.compute.manager [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1472.024666] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6944a8cc-7798-4798-9c6d-45c80fd9d3b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.092354] env[63297]: DEBUG nova.compute.manager [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1472.300876] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.543136] env[63297]: INFO nova.compute.manager [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Took 59.56 seconds to build instance. [ 1472.613431] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.624967] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da0833e-6aca-4c69-9d6a-ba18a18830a3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.633713] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541a4262-ab7c-4163-ac62-0baa7bd0eb5f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.664060] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c5ce97-6c7a-43fe-af36-c408b2d98ab7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.672124] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ec96b5-82fd-4274-b0a6-b4b9ba9a111d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.685698] env[63297]: DEBUG nova.compute.provider_tree [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1472.802389] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d918c574-6b5d-405a-8ee3-d70346a25e20 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.809965] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-910516f5-17b8-4e63-85ec-3baffd4bc793 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Suspending the VM {{(pid=63297) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1472.810234] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-0164b050-4ba7-4ad6-82f2-23ffc72708b9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1472.816900] env[63297]: DEBUG oslo_vmware.api [None req-910516f5-17b8-4e63-85ec-3baffd4bc793 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1472.816900] env[63297]: value = "task-1697618" [ 1472.816900] env[63297]: _type = "Task" [ 1472.816900] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.825621] env[63297]: DEBUG oslo_vmware.api [None req-910516f5-17b8-4e63-85ec-3baffd4bc793 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697618, 'name': SuspendVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.045270] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f1efe452-b9cc-42dd-bdf9-aee6662358d7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "71faf167-dfe3-4792-9841-b5ab4b333884" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.196s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.196099] env[63297]: DEBUG nova.scheduler.client.report [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1473.326947] env[63297]: DEBUG oslo_vmware.api [None req-910516f5-17b8-4e63-85ec-3baffd4bc793 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697618, 'name': SuspendVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.701770] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.505s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.702505] env[63297]: DEBUG nova.compute.manager [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1473.705399] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.790s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.706238] env[63297]: DEBUG nova.objects.instance [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lazy-loading 'resources' on Instance uuid 63785911-ea55-4aeb-9ba2-6cea5ddd9cae {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1473.829654] env[63297]: DEBUG oslo_vmware.api [None req-910516f5-17b8-4e63-85ec-3baffd4bc793 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697618, 'name': SuspendVM_Task, 'duration_secs': 0.610723} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.829935] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-910516f5-17b8-4e63-85ec-3baffd4bc793 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Suspended the VM {{(pid=63297) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1473.830123] env[63297]: DEBUG nova.compute.manager [None req-910516f5-17b8-4e63-85ec-3baffd4bc793 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1473.830967] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15944adf-ec67-4f3c-89cc-f9f4c997686a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.208746] env[63297]: DEBUG nova.compute.utils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1474.212823] env[63297]: DEBUG nova.compute.manager [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1474.212995] env[63297]: DEBUG nova.network.neutron [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1474.286409] env[63297]: DEBUG nova.policy [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0da2fdb3c81747698f971951c5e0068b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efc8039a70b34a269d3aed1ecb558b7e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1474.605417] env[63297]: DEBUG nova.network.neutron [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Successfully created port: 1188b948-a801-47e0-a828-a6ac36c83619 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1474.681876] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a9f01a-33f9-466b-914f-8ea72c915b8d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.692151] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09ffafe-d6eb-4ba2-9fb6-044660dca2b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.723188] env[63297]: DEBUG nova.compute.manager [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1474.726890] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe1bfc1-01d5-4ea2-94af-607234a8a175 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.738788] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcea4d7-edd0-4a73-9aa9-8db59afcaa43 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.752436] env[63297]: DEBUG nova.compute.provider_tree [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1475.257192] env[63297]: DEBUG nova.scheduler.client.report [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1475.733163] env[63297]: DEBUG nova.compute.manager [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1475.754998] env[63297]: DEBUG nova.virt.hardware [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1475.754998] env[63297]: DEBUG nova.virt.hardware [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1475.754998] env[63297]: DEBUG nova.virt.hardware [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1475.754998] env[63297]: DEBUG nova.virt.hardware [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1475.755369] env[63297]: DEBUG nova.virt.hardware [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1475.755369] env[63297]: DEBUG nova.virt.hardware [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1475.755490] env[63297]: DEBUG nova.virt.hardware [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1475.755663] env[63297]: DEBUG nova.virt.hardware [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1475.755827] env[63297]: DEBUG nova.virt.hardware [None 
req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1475.755990] env[63297]: DEBUG nova.virt.hardware [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1475.756178] env[63297]: DEBUG nova.virt.hardware [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1475.757073] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afdaf53-fe29-437a-bce2-4315102a6fae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.763511] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.058s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.768229] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 51.765s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.771492] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29380bce-af56-4ffe-ba1e-b7efc2a730c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.787673] env[63297]: INFO nova.scheduler.client.report [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleted allocations for instance 63785911-ea55-4aeb-9ba2-6cea5ddd9cae [ 1476.137644] env[63297]: DEBUG nova.compute.manager [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1476.138797] env[63297]: DEBUG nova.compute.manager [req-8ff5d139-646d-488e-adad-ae6acecf7ac1 req-2061f686-3b9f-4425-bb3b-e027227f4584 service nova] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Received event network-vif-plugged-1188b948-a801-47e0-a828-a6ac36c83619 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1476.139816] env[63297]: DEBUG oslo_concurrency.lockutils [req-8ff5d139-646d-488e-adad-ae6acecf7ac1 req-2061f686-3b9f-4425-bb3b-e027227f4584 service nova] Acquiring lock "efaa465d-f6b2-4891-8e96-b4c3af052759-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.139816] env[63297]: DEBUG oslo_concurrency.lockutils [req-8ff5d139-646d-488e-adad-ae6acecf7ac1 req-2061f686-3b9f-4425-bb3b-e027227f4584 service nova] Lock "efaa465d-f6b2-4891-8e96-b4c3af052759-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.139816] env[63297]: DEBUG oslo_concurrency.lockutils [req-8ff5d139-646d-488e-adad-ae6acecf7ac1 req-2061f686-3b9f-4425-bb3b-e027227f4584 service nova] Lock "efaa465d-f6b2-4891-8e96-b4c3af052759-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.139816] env[63297]: DEBUG nova.compute.manager [req-8ff5d139-646d-488e-adad-ae6acecf7ac1 req-2061f686-3b9f-4425-bb3b-e027227f4584 service nova] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] No waiting events found dispatching network-vif-plugged-1188b948-a801-47e0-a828-a6ac36c83619 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1476.139816] env[63297]: WARNING nova.compute.manager [req-8ff5d139-646d-488e-adad-ae6acecf7ac1 req-2061f686-3b9f-4425-bb3b-e027227f4584 service nova] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Received unexpected event network-vif-plugged-1188b948-a801-47e0-a828-a6ac36c83619 for instance with vm_state building and task_state spawning. [ 1476.141192] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e9e5db-46b3-4d7c-9c51-21561f21f79a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.258729] env[63297]: DEBUG nova.network.neutron [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Successfully updated port: 1188b948-a801-47e0-a828-a6ac36c83619 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1476.274957] env[63297]: DEBUG nova.objects.instance [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lazy-loading 'migration_context' on Instance uuid b95b7656-70ac-4eaf-9934-4b4c50e78035 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1476.296268] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23f9b06a-11b1-47d0-9deb-0b08a98310c2 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "63785911-ea55-4aeb-9ba2-6cea5ddd9cae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.545s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.655020] env[63297]: INFO nova.compute.manager [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] instance snapshotting [ 1476.655236] env[63297]: WARNING nova.compute.manager [None req-c6c37244-be73-44ac-a763-2e222763af3d 
tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1476.658038] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ccb8ab8-c4db-4f8c-91d7-d7f0cb0f15ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.678811] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7aabea-83ed-4156-8d0c-a964ebcb1b73 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.765927] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "refresh_cache-efaa465d-f6b2-4891-8e96-b4c3af052759" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.766215] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "refresh_cache-efaa465d-f6b2-4891-8e96-b4c3af052759" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.766252] env[63297]: DEBUG nova.network.neutron [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.107170] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "8fa5fef6-8768-4e24-aab3-db56a10588c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.107445] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "8fa5fef6-8768-4e24-aab3-db56a10588c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.107655] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "8fa5fef6-8768-4e24-aab3-db56a10588c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.107842] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "8fa5fef6-8768-4e24-aab3-db56a10588c2-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.107982] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "8fa5fef6-8768-4e24-aab3-db56a10588c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.112286] env[63297]: INFO nova.compute.manager [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Terminating instance [ 1477.118613] env[63297]: DEBUG nova.compute.manager [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1477.118824] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1477.119752] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749113d3-ba11-4296-a7f6-ff4fa3d73bc3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.137787] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1477.138516] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a12370c7-82a7-4481-85fd-24cd6b7c8d6e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.149323] env[63297]: DEBUG oslo_vmware.api [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1477.149323] env[63297]: value = "task-1697619" [ 1477.149323] env[63297]: _type = "Task" [ 1477.149323] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.161823] env[63297]: DEBUG oslo_vmware.api [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697619, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.179559] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de1c51b-999d-4055-af06-3f0ed1264c25 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.187846] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51c1613-368f-4afe-902c-86ca35c2bd5a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.192522] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1477.193035] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a89fdf0c-b2ed-4a4e-a606-7b272b52eb10 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.230341] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c459a20e-8d58-4efb-a0a3-3f7ff1328162 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.231451] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1477.231451] env[63297]: value = "task-1697620" [ 1477.231451] env[63297]: _type = "Task" [ 1477.231451] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.241191] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccfbd479-7567-4c3b-be61-0e4ae86af4d9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.249412] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697620, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.262224] env[63297]: DEBUG nova.compute.provider_tree [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1477.302950] env[63297]: DEBUG nova.network.neutron [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1477.523690] env[63297]: DEBUG nova.network.neutron [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Updating instance_info_cache with network_info: [{"id": "1188b948-a801-47e0-a828-a6ac36c83619", "address": "fa:16:3e:5f:f9:54", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1188b948-a8", "ovs_interfaceid": "1188b948-a801-47e0-a828-a6ac36c83619", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.659770] env[63297]: DEBUG oslo_vmware.api [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697619, 'name': PowerOffVM_Task, 'duration_secs': 0.248523} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.660056] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1477.660227] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1477.660475] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1514683b-ad04-4cd7-82e9-2fcca8f5806b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.743072] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697620, 'name': CreateSnapshot_Task, 'duration_secs': 0.50192} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.744505] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1477.744809] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1477.745012] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1477.745208] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleting the datastore file [datastore1] 8fa5fef6-8768-4e24-aab3-db56a10588c2 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1477.746073] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae25c16-cd73-4c6c-8a7f-ef8360d76842 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.748703] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7dad4462-8f7b-41ce-a3ef-2e05bab81d3e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.760086] env[63297]: DEBUG oslo_vmware.api [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1477.760086] env[63297]: value = "task-1697622" [ 1477.760086] env[63297]: _type = "Task" [ 1477.760086] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.765975] env[63297]: DEBUG nova.scheduler.client.report [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1477.772615] env[63297]: DEBUG oslo_vmware.api [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697622, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.027044] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "refresh_cache-efaa465d-f6b2-4891-8e96-b4c3af052759" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.027044] env[63297]: DEBUG nova.compute.manager [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Instance network_info: |[{"id": "1188b948-a801-47e0-a828-a6ac36c83619", "address": "fa:16:3e:5f:f9:54", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1188b948-a8", "ovs_interfaceid": "1188b948-a801-47e0-a828-a6ac36c83619", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1478.027255] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:f9:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'22390021-1742-415d-b442-811550d09927', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1188b948-a801-47e0-a828-a6ac36c83619', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1478.035266] env[63297]: DEBUG oslo.service.loopingcall [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.035548] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1478.035800] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-06230f52-216d-48cd-8939-5261a8aef6fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.057021] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1478.057021] env[63297]: value = "task-1697623" [ 1478.057021] env[63297]: _type = "Task" [ 1478.057021] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.065806] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697623, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.165370] env[63297]: DEBUG nova.compute.manager [req-b7fb96ac-503a-46d8-beb9-ff7100cb2d03 req-4455403b-f128-4988-a8c9-60251e3df05a service nova] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Received event network-changed-1188b948-a801-47e0-a828-a6ac36c83619 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1478.165643] env[63297]: DEBUG nova.compute.manager [req-b7fb96ac-503a-46d8-beb9-ff7100cb2d03 req-4455403b-f128-4988-a8c9-60251e3df05a service nova] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Refreshing instance network info cache due to event network-changed-1188b948-a801-47e0-a828-a6ac36c83619. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1478.166719] env[63297]: DEBUG oslo_concurrency.lockutils [req-b7fb96ac-503a-46d8-beb9-ff7100cb2d03 req-4455403b-f128-4988-a8c9-60251e3df05a service nova] Acquiring lock "refresh_cache-efaa465d-f6b2-4891-8e96-b4c3af052759" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.166719] env[63297]: DEBUG oslo_concurrency.lockutils [req-b7fb96ac-503a-46d8-beb9-ff7100cb2d03 req-4455403b-f128-4988-a8c9-60251e3df05a service nova] Acquired lock "refresh_cache-efaa465d-f6b2-4891-8e96-b4c3af052759" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.166719] env[63297]: DEBUG nova.network.neutron [req-b7fb96ac-503a-46d8-beb9-ff7100cb2d03 req-4455403b-f128-4988-a8c9-60251e3df05a service nova] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Refreshing network info cache for port 1188b948-a801-47e0-a828-a6ac36c83619 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1478.268142] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1478.268500] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2636683c-4656-4be9-9dcc-19cd46fa174d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.284670] env[63297]: DEBUG oslo_vmware.api [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697622, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128925} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.286081] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1478.286298] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1478.286528] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1478.286659] env[63297]: INFO nova.compute.manager [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Took 1.17 seconds to destroy the instance on the hypervisor. 
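The PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern visible in the traceback paths (oslo_vmware/api.py wait_for_task / _poll_task): the driver submits a vCenter task, then polls its state until it reports success or error, emitting the "progress is N%" DEBUG lines along the way. Below is a minimal illustrative sketch of that polling loop, not the actual oslo.vmware implementation; the get_task_info() helper, the .state/.progress/.error fields and the POLL_INTERVAL value are assumptions standing in for the real PropertyCollector lookups the driver performs.

    import time

    POLL_INTERVAL = 0.5  # illustrative delay between polls, in seconds

    def wait_for_task(task_ref, get_task_info):
        # Poll a vCenter task until it finishes.
        # get_task_info is a hypothetical callable assumed to return an
        # object with .state ('running'/'success'/'error'), .progress and
        # .error attributes; the real driver retrieves these via the
        # vSphere PropertyCollector.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise RuntimeError('task %s failed: %s' % (task_ref, info.error))
            # corresponds to the "progress is N%" DEBUG lines in the log
            time.sleep(POLL_INTERVAL)

Under this reading, the "completed successfully" lines mark the loop returning with state 'success', after which the caller (e.g. _destroy_instance) moves on to the next step such as deleting the datastore contents.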
[ 1478.286903] env[63297]: DEBUG oslo.service.loopingcall [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.287206] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1478.287206] env[63297]: value = "task-1697624" [ 1478.287206] env[63297]: _type = "Task" [ 1478.287206] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.287424] env[63297]: DEBUG nova.compute.manager [-] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1478.287524] env[63297]: DEBUG nova.network.neutron [-] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1478.301148] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697624, 'name': CloneVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.571673] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697623, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.780594] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.013s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.786496] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 51.213s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.788477] env[63297]: INFO nova.compute.claims [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1478.808865] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697624, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.912929] env[63297]: DEBUG nova.network.neutron [req-b7fb96ac-503a-46d8-beb9-ff7100cb2d03 req-4455403b-f128-4988-a8c9-60251e3df05a service nova] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Updated VIF entry in instance network info cache for port 1188b948-a801-47e0-a828-a6ac36c83619. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1478.913331] env[63297]: DEBUG nova.network.neutron [req-b7fb96ac-503a-46d8-beb9-ff7100cb2d03 req-4455403b-f128-4988-a8c9-60251e3df05a service nova] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Updating instance_info_cache with network_info: [{"id": "1188b948-a801-47e0-a828-a6ac36c83619", "address": "fa:16:3e:5f:f9:54", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1188b948-a8", "ovs_interfaceid": "1188b948-a801-47e0-a828-a6ac36c83619", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.049936] env[63297]: DEBUG nova.network.neutron [-] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.072854] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697623, 'name': CreateVM_Task, 'duration_secs': 0.601631} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.073121] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1479.073863] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.074049] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.074394] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1479.074889] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5cea02d-10c0-4784-aec3-9d1b79f8c1a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.081164] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1479.081164] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5269cbb5-e5e7-f26b-c73c-9fdf85a0e0d8" [ 1479.081164] env[63297]: _type = "Task" [ 1479.081164] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.089610] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5269cbb5-e5e7-f26b-c73c-9fdf85a0e0d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.309819] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697624, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.416309] env[63297]: DEBUG oslo_concurrency.lockutils [req-b7fb96ac-503a-46d8-beb9-ff7100cb2d03 req-4455403b-f128-4988-a8c9-60251e3df05a service nova] Releasing lock "refresh_cache-efaa465d-f6b2-4891-8e96-b4c3af052759" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.553063] env[63297]: INFO nova.compute.manager [-] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Took 1.27 seconds to deallocate network for instance. [ 1479.592029] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5269cbb5-e5e7-f26b-c73c-9fdf85a0e0d8, 'name': SearchDatastore_Task, 'duration_secs': 0.011858} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.592275] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.592517] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1479.592769] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.592918] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.593141] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1479.593424] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d05e35bc-56f0-4bfe-9834-9ab89ff67517 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.612394] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] 
Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1479.612567] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1479.613441] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c492c1c-295a-4ea5-b06f-7701666092b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.620289] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1479.620289] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52eabc5e-bc5f-3260-1359-2ec542bcb5d4" [ 1479.620289] env[63297]: _type = "Task" [ 1479.620289] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.631152] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52eabc5e-bc5f-3260-1359-2ec542bcb5d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.810342] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697624, 'name': CloneVM_Task, 'duration_secs': 1.428945} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.814603] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Created linked-clone VM from snapshot [ 1479.815755] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7ab005-a913-4ed9-b0dc-cc5c91c03308 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.824383] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Uploading image 5948de63-767d-491d-a49f-0e746948145f {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1479.846729] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1479.846729] env[63297]: value = "vm-353918" [ 1479.846729] env[63297]: _type = "VirtualMachine" [ 1479.846729] env[63297]: }. 
{{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1479.847016] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1b939e94-ce3a-4564-8d9c-d1121cd11158 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.859204] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lease: (returnval){ [ 1479.859204] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5257f053-5279-a088-f892-e4e64487c9a6" [ 1479.859204] env[63297]: _type = "HttpNfcLease" [ 1479.859204] env[63297]: } obtained for exporting VM: (result){ [ 1479.859204] env[63297]: value = "vm-353918" [ 1479.859204] env[63297]: _type = "VirtualMachine" [ 1479.859204] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1479.859516] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the lease: (returnval){ [ 1479.859516] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5257f053-5279-a088-f892-e4e64487c9a6" [ 1479.859516] env[63297]: _type = "HttpNfcLease" [ 1479.859516] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1479.873220] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1479.873220] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5257f053-5279-a088-f892-e4e64487c9a6" [ 1479.873220] env[63297]: _type = "HttpNfcLease" [ 1479.873220] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1479.873505] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1479.873505] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5257f053-5279-a088-f892-e4e64487c9a6" [ 1479.873505] env[63297]: _type = "HttpNfcLease" [ 1479.873505] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1479.874318] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dffa755-f7d4-470b-b06f-2694980cee14 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.884282] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e729db-6eb5-815b-ba1a-32c1bd54ba5c/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1479.884496] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e729db-6eb5-815b-ba1a-32c1bd54ba5c/disk-0.vmdk for reading. 
{{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1479.983607] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7bf62b04-7a0c-4700-b6fc-a4ca70244c2a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.060730] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.140879] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52eabc5e-bc5f-3260-1359-2ec542bcb5d4, 'name': SearchDatastore_Task, 'duration_secs': 0.012767} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.146208] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fd17fdc-8bef-4c92-9dbe-38780b2fd43d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.154174] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1480.154174] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52696924-8e44-064d-ad36-253ffb069828" [ 1480.154174] env[63297]: _type = "Task" [ 1480.154174] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.172168] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52696924-8e44-064d-ad36-253ffb069828, 'name': SearchDatastore_Task, 'duration_secs': 0.012253} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.175590] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.176025] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] efaa465d-f6b2-4891-8e96-b4c3af052759/efaa465d-f6b2-4891-8e96-b4c3af052759.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1480.176633] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c53e3def-a2db-4297-8894-c5654df9643c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.187941] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1480.187941] env[63297]: value = "task-1697626" [ 1480.187941] env[63297]: _type = "Task" [ 1480.187941] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.197078] env[63297]: DEBUG nova.compute.manager [req-a8fd9f79-719b-4e00-a0ae-dec3ac7b3001 req-0ead4a6d-853c-4e5c-9726-9c162633b9ef service nova] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Received event network-vif-deleted-7c20637a-f8f5-4a26-b5af-07db4b1c9991 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1480.206076] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697626, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.308829] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d26ca3-57c5-4d02-ab3d-e75a9ebafda8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.321579] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559334dd-dd60-4abb-bfc3-3668186b2cda {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.330352] env[63297]: INFO nova.compute.manager [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Swapping old allocation on dict_keys(['88960333-a089-4255-ad72-5c02d57b2b35']) held by migration 0d35076e-6ba5-42c2-89c8-51053c88aa3a for instance [ 1480.364686] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e801b43-e7c8-485b-91a8-4aecde96a1eb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.373713] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c00592d-e77f-4d66-8842-a5a2ad6b6958 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.392528] env[63297]: DEBUG nova.compute.provider_tree [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1480.396324] env[63297]: DEBUG nova.scheduler.client.report [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Overwriting current allocation {'allocations': {'88960333-a089-4255-ad72-5c02d57b2b35': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 97}}, 'project_id': 'fa1a1e0788594eb292e3fce25ed37bd2', 'user_id': 'fc041fcfaf8543ed829cb62fffa3b883', 'consumer_generation': 1} on consumer b95b7656-70ac-4eaf-9934-4b4c50e78035 {{(pid=63297) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1480.478939] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.479167] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquired lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.479370] env[63297]: DEBUG nova.network.neutron [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 
b95b7656-70ac-4eaf-9934-4b4c50e78035] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1480.703697] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697626, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.896635] env[63297]: DEBUG nova.scheduler.client.report [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1481.203379] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697626, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.596937} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.203784] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] efaa465d-f6b2-4891-8e96-b4c3af052759/efaa465d-f6b2-4891-8e96-b4c3af052759.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1481.204104] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1481.204424] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55f54273-f6e3-4e20-8e08-d03469fd8b16 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.211946] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1481.211946] env[63297]: value = "task-1697627" [ 1481.211946] env[63297]: _type = "Task" [ 1481.211946] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.220846] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697627, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.222109] env[63297]: DEBUG nova.network.neutron [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance_info_cache with network_info: [{"id": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "address": "fa:16:3e:21:7c:1d", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9a7e1f4-1a", "ovs_interfaceid": "d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.402212] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.616s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.402809] env[63297]: DEBUG nova.compute.manager [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1481.409233] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.694s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.409233] env[63297]: DEBUG nova.objects.instance [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lazy-loading 'resources' on Instance uuid d2436717-7230-448f-b310-d062b1f11c52 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1481.723608] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697627, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080363} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.723884] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1481.724440] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Releasing lock "refresh_cache-b95b7656-70ac-4eaf-9934-4b4c50e78035" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.724869] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1481.725666] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd85ac82-afdd-4046-a2de-3248237eacb9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.728294] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-686e4803-a3d7-4a5d-bc56-7289943dd60f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.752462] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] efaa465d-f6b2-4891-8e96-b4c3af052759/efaa465d-f6b2-4891-8e96-b4c3af052759.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1481.754088] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-5d376c85-edc7-4a30-9a42-686f2a63f714 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.769121] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1481.769121] env[63297]: value = "task-1697628" [ 1481.769121] env[63297]: _type = "Task" [ 1481.769121] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.778463] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697628, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.779556] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1481.779556] env[63297]: value = "task-1697629" [ 1481.779556] env[63297]: _type = "Task" [ 1481.779556] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.788776] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697629, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.910156] env[63297]: DEBUG nova.compute.utils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1481.915217] env[63297]: DEBUG nova.compute.manager [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1481.915379] env[63297]: DEBUG nova.network.neutron [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1481.990496] env[63297]: DEBUG nova.policy [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46e99cac1e2542538cc59c726b0224a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d4ff8789e0a48048ce6ca0f9503c05a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1482.281499] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697628, 'name': PowerOffVM_Task, 'duration_secs': 0.232233} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.285645] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1482.286497] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:24:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='5cd2709a-9ccb-43da-a92d-61f75514f90c',id=29,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-114049318',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1482.286833] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1482.287117] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1482.287525] env[63297]: DEBUG nova.virt.hardware [None 
req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1482.287647] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1482.287829] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1482.288051] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1482.288215] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1482.288380] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1482.288537] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1482.288792] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1482.296990] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70ec7c2f-28a4-4f88-b828-67c64f431beb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.314743] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697629, 'name': ReconfigVM_Task, 'duration_secs': 0.435729} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.315622] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Reconfigured VM instance instance-00000045 to attach disk [datastore1] efaa465d-f6b2-4891-8e96-b4c3af052759/efaa465d-f6b2-4891-8e96-b4c3af052759.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1482.316368] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1482.316368] env[63297]: value = "task-1697630" [ 1482.316368] env[63297]: _type = "Task" [ 1482.316368] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.316915] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-705e7096-9568-4116-94fb-c782aa30ceca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.335359] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697630, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.336347] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1482.336347] env[63297]: value = "task-1697631" [ 1482.336347] env[63297]: _type = "Task" [ 1482.336347] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.345867] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697631, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.365033] env[63297]: DEBUG nova.network.neutron [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Successfully created port: 20681722-92b3-46f4-bd82-1775db48a289 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1482.402164] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40510195-49c9-4fb1-aa94-3147832d87ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.410828] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43432d0d-06f7-4e09-a2ed-c0f3b2c14067 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.415784] env[63297]: DEBUG nova.compute.manager [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1482.445737] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95cbc47-c9b3-49d7-8414-82f2962ba1ca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.454658] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb31629-5d63-48a6-9f76-54eae377c157 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.470804] env[63297]: DEBUG nova.compute.provider_tree [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.831433] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.847660] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697631, 'name': Rename_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.973708] env[63297]: DEBUG nova.scheduler.client.report [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1483.330836] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.347763] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697631, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.452449] env[63297]: DEBUG nova.compute.manager [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1483.479258] env[63297]: DEBUG nova.virt.hardware [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1483.479518] env[63297]: DEBUG nova.virt.hardware [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1483.479677] env[63297]: DEBUG nova.virt.hardware [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1483.479858] env[63297]: DEBUG nova.virt.hardware [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1483.480061] env[63297]: DEBUG nova.virt.hardware [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1483.480228] env[63297]: DEBUG nova.virt.hardware [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1483.480441] env[63297]: DEBUG nova.virt.hardware [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1483.480607] env[63297]: DEBUG nova.virt.hardware [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1483.480781] env[63297]: DEBUG nova.virt.hardware [None 
req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1483.480942] env[63297]: DEBUG nova.virt.hardware [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1483.481136] env[63297]: DEBUG nova.virt.hardware [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1483.481922] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.076s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.484469] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80e1a85-c566-4345-a282-543352e9ced6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.487656] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.122s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.489214] env[63297]: INFO nova.compute.claims [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1483.498816] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ba5d82-9b41-47d7-ac9e-4cc218bebffe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.514799] env[63297]: INFO nova.scheduler.client.report [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Deleted allocations for instance d2436717-7230-448f-b310-d062b1f11c52 [ 1483.834742] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697630, 'name': ReconfigVM_Task, 'duration_secs': 1.110747} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.834742] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4452e2a1-bc0e-47b9-aa18-6ab25ac331c5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.861253] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:24:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='5cd2709a-9ccb-43da-a92d-61f75514f90c',id=29,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-114049318',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1483.861457] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1483.861634] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1483.861827] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1483.861973] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1483.862200] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1483.862432] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1483.862605] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1483.862775] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1483.864711] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1483.865289] env[63297]: DEBUG nova.virt.hardware [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1483.866762] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d7e00cb-90d4-442d-9c97-ba8826134998 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.872468] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697631, 'name': Rename_Task, 'duration_secs': 1.080002} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.873572] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1483.873864] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cceed9b5-12fa-404b-90fd-2a082ca207d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.877394] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1483.877394] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52064713-fdfd-d627-b621-3e19af156a09" [ 1483.877394] env[63297]: _type = "Task" [ 1483.877394] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.884414] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1483.884414] env[63297]: value = "task-1697632" [ 1483.884414] env[63297]: _type = "Task" [ 1483.884414] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.888116] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52064713-fdfd-d627-b621-3e19af156a09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.899015] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697632, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.943806] env[63297]: DEBUG nova.compute.manager [req-58efc586-effb-4c07-8d1d-091c83487e52 req-507c7320-a6c1-4ec5-ad37-6129c91eada6 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Received event network-vif-plugged-20681722-92b3-46f4-bd82-1775db48a289 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1483.944058] env[63297]: DEBUG oslo_concurrency.lockutils [req-58efc586-effb-4c07-8d1d-091c83487e52 req-507c7320-a6c1-4ec5-ad37-6129c91eada6 service nova] Acquiring lock "ac112251-8cc3-4f57-8983-8a07e2a068f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.944275] env[63297]: DEBUG oslo_concurrency.lockutils [req-58efc586-effb-4c07-8d1d-091c83487e52 req-507c7320-a6c1-4ec5-ad37-6129c91eada6 service nova] Lock "ac112251-8cc3-4f57-8983-8a07e2a068f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.944443] env[63297]: DEBUG oslo_concurrency.lockutils [req-58efc586-effb-4c07-8d1d-091c83487e52 req-507c7320-a6c1-4ec5-ad37-6129c91eada6 service nova] Lock "ac112251-8cc3-4f57-8983-8a07e2a068f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.944613] env[63297]: DEBUG nova.compute.manager [req-58efc586-effb-4c07-8d1d-091c83487e52 req-507c7320-a6c1-4ec5-ad37-6129c91eada6 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] No waiting events found dispatching network-vif-plugged-20681722-92b3-46f4-bd82-1775db48a289 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1483.944794] env[63297]: WARNING nova.compute.manager [req-58efc586-effb-4c07-8d1d-091c83487e52 req-507c7320-a6c1-4ec5-ad37-6129c91eada6 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Received unexpected event network-vif-plugged-20681722-92b3-46f4-bd82-1775db48a289 for instance with vm_state building and task_state spawning. 
[ 1484.007416] env[63297]: DEBUG nova.network.neutron [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Successfully updated port: 20681722-92b3-46f4-bd82-1775db48a289 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1484.022607] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7657d5c5-e55d-4b10-a38a-925e88e66a00 tempest-InstanceActionsTestJSON-948145691 tempest-InstanceActionsTestJSON-948145691-project-member] Lock "d2436717-7230-448f-b310-d062b1f11c52" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.269s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.390775] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52064713-fdfd-d627-b621-3e19af156a09, 'name': SearchDatastore_Task, 'duration_secs': 0.010102} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.403167] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Reconfiguring VM instance instance-00000030 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1484.403167] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f103e0b-5f3c-4677-baf7-3eb85941fa01 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.421728] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697632, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.423251] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1484.423251] env[63297]: value = "task-1697633" [ 1484.423251] env[63297]: _type = "Task" [ 1484.423251] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.432515] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697633, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.511028] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Acquiring lock "refresh_cache-ac112251-8cc3-4f57-8983-8a07e2a068f8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.511206] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Acquired lock "refresh_cache-ac112251-8cc3-4f57-8983-8a07e2a068f8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.511363] env[63297]: DEBUG nova.network.neutron [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1484.904783] env[63297]: DEBUG oslo_vmware.api [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697632, 'name': PowerOnVM_Task, 'duration_secs': 0.556814} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.905149] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1484.905405] env[63297]: INFO nova.compute.manager [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Took 9.17 seconds to spawn the instance on the hypervisor. 
[ 1484.905646] env[63297]: DEBUG nova.compute.manager [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1484.906579] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d88a6b-204f-4bb4-8509-9eb8af5a0cb3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.911592] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d8bd7d-e246-466d-9c27-2021ef411527 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.924240] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfca871-abd4-4d5a-a5e7-ba3b4ccc5395 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.939603] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697633, 'name': ReconfigVM_Task, 'duration_secs': 0.252392} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.968091] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Reconfigured VM instance instance-00000030 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1484.969946] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c59bf8-a154-4e0f-b536-8c26016d6690 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.973117] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83203e7c-b0c3-4fdf-b852-09716993248c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.993588] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55e1b79-00d6-4efb-ae9e-40b18ac157f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.005615] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] b95b7656-70ac-4eaf-9934-4b4c50e78035/b95b7656-70ac-4eaf-9934-4b4c50e78035.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1485.006211] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acf75877-60cd-4a2d-ada0-c637f908c538 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1485.030291] env[63297]: DEBUG nova.compute.provider_tree [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1485.033146] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1485.033146] env[63297]: value = "task-1697634" [ 1485.033146] env[63297]: _type = "Task" [ 1485.033146] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.043754] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697634, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.111574] env[63297]: DEBUG nova.network.neutron [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1485.392527] env[63297]: DEBUG nova.network.neutron [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Updating instance_info_cache with network_info: [{"id": "20681722-92b3-46f4-bd82-1775db48a289", "address": "fa:16:3e:fd:f0:7d", "network": {"id": "2305a114-386a-45f6-9253-caa7ef7bb250", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1353388897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d4ff8789e0a48048ce6ca0f9503c05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20681722-92", "ovs_interfaceid": "20681722-92b3-46f4-bd82-1775db48a289", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.426323] env[63297]: INFO nova.compute.manager [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Took 63.42 seconds to build instance. 
[ 1485.535661] env[63297]: DEBUG nova.scheduler.client.report [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1485.553301] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697634, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.895181] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Releasing lock "refresh_cache-ac112251-8cc3-4f57-8983-8a07e2a068f8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.895575] env[63297]: DEBUG nova.compute.manager [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Instance network_info: |[{"id": "20681722-92b3-46f4-bd82-1775db48a289", "address": "fa:16:3e:fd:f0:7d", "network": {"id": "2305a114-386a-45f6-9253-caa7ef7bb250", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1353388897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d4ff8789e0a48048ce6ca0f9503c05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20681722-92", "ovs_interfaceid": "20681722-92b3-46f4-bd82-1775db48a289", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1485.896110] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:f0:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'20681722-92b3-46f4-bd82-1775db48a289', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1485.904586] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Creating folder: Project (1d4ff8789e0a48048ce6ca0f9503c05a). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1485.904890] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-68670179-1130-4d2c-aa08-0b863e609352 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.917532] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Created folder: Project (1d4ff8789e0a48048ce6ca0f9503c05a) in parent group-v353718. [ 1485.917732] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Creating folder: Instances. Parent ref: group-v353919. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1485.917997] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79d1d99c-a1a9-47d2-a687-72825df31b02 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.928625] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Created folder: Instances in parent group-v353919. [ 1485.928893] env[63297]: DEBUG oslo.service.loopingcall [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1485.929123] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1485.929355] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c16b8ed-1f28-4d4a-ac74-48ccd5b17542 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.944688] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f0e21d75-df6c-453d-8794-fbd4412dcb4d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "efaa465d-f6b2-4891-8e96-b4c3af052759" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.723s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.950691] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1485.950691] env[63297]: value = "task-1697637" [ 1485.950691] env[63297]: _type = "Task" [ 1485.950691] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.959994] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697637, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.047771] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.048493] env[63297]: DEBUG nova.compute.manager [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1486.054485] env[63297]: DEBUG oslo_concurrency.lockutils [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.106s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.054652] env[63297]: DEBUG nova.objects.instance [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lazy-loading 'resources' on Instance uuid e7fae121-174f-4955-a185-b3f92c6ab110 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1486.056627] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697634, 'name': ReconfigVM_Task, 'duration_secs': 0.53489} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.056627] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Reconfigured VM instance instance-00000030 to attach disk [datastore1] b95b7656-70ac-4eaf-9934-4b4c50e78035/b95b7656-70ac-4eaf-9934-4b4c50e78035.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1486.057465] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c04f0fb-4079-4295-a77f-94a470a40d0a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.080143] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad97e109-a35e-4096-84fc-b4c1bae26659 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.083905] env[63297]: DEBUG nova.compute.manager [req-e478644f-fd90-4e0a-aa92-8a7869d10f5a req-4550a4a6-a6c5-4600-b1fd-882889b19009 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Received event network-changed-20681722-92b3-46f4-bd82-1775db48a289 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1486.084145] env[63297]: DEBUG nova.compute.manager [req-e478644f-fd90-4e0a-aa92-8a7869d10f5a req-4550a4a6-a6c5-4600-b1fd-882889b19009 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Refreshing instance network info cache due to event network-changed-20681722-92b3-46f4-bd82-1775db48a289. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1486.084377] env[63297]: DEBUG oslo_concurrency.lockutils [req-e478644f-fd90-4e0a-aa92-8a7869d10f5a req-4550a4a6-a6c5-4600-b1fd-882889b19009 service nova] Acquiring lock "refresh_cache-ac112251-8cc3-4f57-8983-8a07e2a068f8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.084523] env[63297]: DEBUG oslo_concurrency.lockutils [req-e478644f-fd90-4e0a-aa92-8a7869d10f5a req-4550a4a6-a6c5-4600-b1fd-882889b19009 service nova] Acquired lock "refresh_cache-ac112251-8cc3-4f57-8983-8a07e2a068f8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.084678] env[63297]: DEBUG nova.network.neutron [req-e478644f-fd90-4e0a-aa92-8a7869d10f5a req-4550a4a6-a6c5-4600-b1fd-882889b19009 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Refreshing network info cache for port 20681722-92b3-46f4-bd82-1775db48a289 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1486.104812] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f5b181-9f59-45e9-b88e-04654fc3b783 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.126320] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc73cc4b-d27e-40d5-9baa-d4e0a8291706 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.134950] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1486.135256] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-779cca17-228e-4467-ad55-8b93e3bd5da6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.145555] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1486.145555] env[63297]: value = "task-1697638" [ 1486.145555] env[63297]: _type = "Task" [ 1486.145555] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.154726] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697638, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.461818] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697637, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.561188] env[63297]: DEBUG nova.compute.utils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1486.563546] env[63297]: DEBUG nova.compute.manager [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1486.563546] env[63297]: DEBUG nova.network.neutron [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1486.652563] env[63297]: DEBUG nova.policy [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c21dc2049dd84f87a3b7cbcd7ba0ebcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48bfb708de5c4dd287530be2f8483ca9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1486.660178] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697638, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.855813] env[63297]: DEBUG oslo_concurrency.lockutils [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "efaa465d-f6b2-4891-8e96-b4c3af052759" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.855813] env[63297]: DEBUG oslo_concurrency.lockutils [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "efaa465d-f6b2-4891-8e96-b4c3af052759" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.904035] env[63297]: DEBUG nova.network.neutron [req-e478644f-fd90-4e0a-aa92-8a7869d10f5a req-4550a4a6-a6c5-4600-b1fd-882889b19009 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Updated VIF entry in instance network info cache for port 20681722-92b3-46f4-bd82-1775db48a289. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1486.904413] env[63297]: DEBUG nova.network.neutron [req-e478644f-fd90-4e0a-aa92-8a7869d10f5a req-4550a4a6-a6c5-4600-b1fd-882889b19009 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Updating instance_info_cache with network_info: [{"id": "20681722-92b3-46f4-bd82-1775db48a289", "address": "fa:16:3e:fd:f0:7d", "network": {"id": "2305a114-386a-45f6-9253-caa7ef7bb250", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1353388897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d4ff8789e0a48048ce6ca0f9503c05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20681722-92", "ovs_interfaceid": "20681722-92b3-46f4-bd82-1775db48a289", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.966289] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697637, 'name': CreateVM_Task, 'duration_secs': 0.785881} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.968970] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1486.970306] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.970306] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.970533] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1486.974040] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a2c022c-ef51-44aa-825d-a22a4c2ebf70 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.976285] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Waiting for the task: (returnval){ [ 1486.976285] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5212e912-9683-5334-3c87-0de17ca1a1d1" [ 1486.976285] env[63297]: _type = "Task" [ 1486.976285] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.997989] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5212e912-9683-5334-3c87-0de17ca1a1d1, 'name': SearchDatastore_Task, 'duration_secs': 0.01021} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.001211] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.001503] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1487.001746] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.001894] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.002082] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1487.002985] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-485f2565-1607-49e1-90ae-3cebcef633c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.013486] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 
tempest-ServersTestManualDisk-383595178-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1487.014930] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1487.014930] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02007072-2d80-4839-926f-89685e0cde5b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.023992] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Waiting for the task: (returnval){ [ 1487.023992] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52568dc4-82b8-1f73-c368-c693182c57c0" [ 1487.023992] env[63297]: _type = "Task" [ 1487.023992] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.034827] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52568dc4-82b8-1f73-c368-c693182c57c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.048443] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc84d7d-0650-4d2a-9d0a-c0b8d839952e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.058519] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aad889b-7073-41a5-bd04-3ddedc9138ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.064920] env[63297]: DEBUG nova.compute.manager [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1487.095553] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e729db-6eb5-815b-ba1a-32c1bd54ba5c/disk-0.vmdk. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1487.096655] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19b5a01-fa96-4290-9fcc-be36b43d4512 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.101025] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf2d80c-2519-4925-b967-1b30aa85eee2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.107860] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e729db-6eb5-815b-ba1a-32c1bd54ba5c/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1487.108047] env[63297]: ERROR oslo_vmware.rw_handles [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e729db-6eb5-815b-ba1a-32c1bd54ba5c/disk-0.vmdk due to incomplete transfer. [ 1487.109792] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d6d2c9bc-16af-44f7-a6f5-b26530e4623b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.112327] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b33bcaa-38b4-4bad-93c0-542e9f6a72a1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.127094] env[63297]: DEBUG nova.compute.provider_tree [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1487.130061] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e729db-6eb5-815b-ba1a-32c1bd54ba5c/disk-0.vmdk. 
{{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1487.130309] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Uploaded image 5948de63-767d-491d-a49f-0e746948145f to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1487.133177] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1487.133907] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7cb2bae9-7483-47f6-a2f0-6ebd559aecce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.141971] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1487.141971] env[63297]: value = "task-1697639" [ 1487.141971] env[63297]: _type = "Task" [ 1487.141971] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.153870] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697639, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.159298] env[63297]: DEBUG oslo_vmware.api [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697638, 'name': PowerOnVM_Task, 'duration_secs': 0.756775} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.159496] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1487.318904] env[63297]: DEBUG nova.network.neutron [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Successfully created port: 6e95dd8f-a3a6-4449-a572-aba4792afffe {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1487.360108] env[63297]: DEBUG nova.compute.utils [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1487.407335] env[63297]: DEBUG oslo_concurrency.lockutils [req-e478644f-fd90-4e0a-aa92-8a7869d10f5a req-4550a4a6-a6c5-4600-b1fd-882889b19009 service nova] Releasing lock "refresh_cache-ac112251-8cc3-4f57-8983-8a07e2a068f8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.536460] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52568dc4-82b8-1f73-c368-c693182c57c0, 'name': SearchDatastore_Task, 'duration_secs': 0.012645} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.537810] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94cbd892-2db4-47f9-a32b-111fcd969327 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.543823] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Waiting for the task: (returnval){ [ 1487.543823] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5212a0f4-34fa-eb97-7207-01f1e379dabf" [ 1487.543823] env[63297]: _type = "Task" [ 1487.543823] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.552388] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5212a0f4-34fa-eb97-7207-01f1e379dabf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.634932] env[63297]: DEBUG nova.scheduler.client.report [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1487.653413] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697639, 'name': Destroy_Task, 'duration_secs': 0.398787} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.653672] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Destroyed the VM [ 1487.653934] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1487.654211] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-40ca72fe-7edd-4ccd-940d-f67d420a8422 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.662016] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1487.662016] env[63297]: value = "task-1697640" [ 1487.662016] env[63297]: _type = "Task" [ 1487.662016] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.675419] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697640, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.865201] env[63297]: DEBUG oslo_concurrency.lockutils [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "efaa465d-f6b2-4891-8e96-b4c3af052759" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.057704] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5212a0f4-34fa-eb97-7207-01f1e379dabf, 'name': SearchDatastore_Task, 'duration_secs': 0.009869} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.057704] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.058500] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] ac112251-8cc3-4f57-8983-8a07e2a068f8/ac112251-8cc3-4f57-8983-8a07e2a068f8.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1488.058988] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55a5ec39-e229-46a4-b63d-d3b6def5b3b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.073024] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Waiting for the task: (returnval){ [ 1488.073024] env[63297]: value = "task-1697641" [ 1488.073024] env[63297]: _type = "Task" [ 1488.073024] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.080870] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697641, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.106047] env[63297]: DEBUG nova.compute.manager [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1488.133280] env[63297]: DEBUG nova.virt.hardware [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1488.133280] env[63297]: DEBUG nova.virt.hardware [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1488.133280] env[63297]: DEBUG nova.virt.hardware [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1488.133280] env[63297]: DEBUG nova.virt.hardware [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1488.133280] env[63297]: DEBUG nova.virt.hardware [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1488.133713] env[63297]: DEBUG nova.virt.hardware [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1488.134132] env[63297]: DEBUG nova.virt.hardware [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1488.134457] env[63297]: DEBUG nova.virt.hardware [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1488.134765] env[63297]: DEBUG 
nova.virt.hardware [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1488.137059] env[63297]: DEBUG nova.virt.hardware [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1488.137059] env[63297]: DEBUG nova.virt.hardware [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1488.137059] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f067af08-b460-4e0c-86e9-8b3151d6d9dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.139943] env[63297]: DEBUG oslo_concurrency.lockutils [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.086s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.143366] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.920s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.144224] env[63297]: DEBUG nova.objects.instance [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lazy-loading 'resources' on Instance uuid f429dd9b-be6c-4e90-876b-3a3931fb1c4a {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1488.152079] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfdfe917-3d08-41bb-90f6-85adc258d6c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.177166] env[63297]: INFO nova.compute.manager [None req-8f8486d0-a58f-4b6b-b30a-1249b01d3cff tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance to original state: 'active' [ 1488.181694] env[63297]: INFO nova.scheduler.client.report [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Deleted allocations for instance e7fae121-174f-4955-a185-b3f92c6ab110 [ 1488.189190] env[63297]: DEBUG oslo_vmware.api [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: 
{'id': task-1697640, 'name': RemoveSnapshot_Task, 'duration_secs': 0.443595} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.189463] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1488.189717] env[63297]: INFO nova.compute.manager [None req-c6c37244-be73-44ac-a763-2e222763af3d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Took 11.53 seconds to snapshot the instance on the hypervisor. [ 1488.582642] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697641, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487293} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.582954] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] ac112251-8cc3-4f57-8983-8a07e2a068f8/ac112251-8cc3-4f57-8983-8a07e2a068f8.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1488.583215] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1488.583549] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7ddc457-4740-4abe-b5e7-92a86af7bcdd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.591935] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Waiting for the task: (returnval){ [ 1488.591935] env[63297]: value = "task-1697642" [ 1488.591935] env[63297]: _type = "Task" [ 1488.591935] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.603049] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697642, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.697041] env[63297]: DEBUG oslo_concurrency.lockutils [None req-66b76659-3945-4664-91f7-d2261121de9a tempest-ImagesOneServerNegativeTestJSON-1190445103 tempest-ImagesOneServerNegativeTestJSON-1190445103-project-member] Lock "e7fae121-174f-4955-a185-b3f92c6ab110" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.708s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.952018] env[63297]: DEBUG oslo_concurrency.lockutils [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "efaa465d-f6b2-4891-8e96-b4c3af052759" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.952018] env[63297]: DEBUG oslo_concurrency.lockutils [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "efaa465d-f6b2-4891-8e96-b4c3af052759" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.952018] env[63297]: INFO nova.compute.manager [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Attaching volume 3b3e5994-2524-4c40-a237-0f11786f09c9 to /dev/sdb [ 1489.004938] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e8c35c-3195-4234-b041-51ca588fbb3e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.022449] env[63297]: DEBUG nova.network.neutron [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Successfully updated port: 6e95dd8f-a3a6-4449-a572-aba4792afffe {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1489.024275] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8094e12-d48b-4439-adc4-01f326aee9bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.032246] env[63297]: DEBUG nova.compute.manager [req-b79fdc27-f4a9-4773-adc7-bc2a1cb09884 req-00210c9d-a511-4ca8-a8cb-df0036617d2e service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received event network-vif-plugged-6e95dd8f-a3a6-4449-a572-aba4792afffe {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1489.032246] env[63297]: DEBUG oslo_concurrency.lockutils [req-b79fdc27-f4a9-4773-adc7-bc2a1cb09884 req-00210c9d-a511-4ca8-a8cb-df0036617d2e service nova] Acquiring lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.032246] env[63297]: DEBUG oslo_concurrency.lockutils 
[req-b79fdc27-f4a9-4773-adc7-bc2a1cb09884 req-00210c9d-a511-4ca8-a8cb-df0036617d2e service nova] Lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.032246] env[63297]: DEBUG oslo_concurrency.lockutils [req-b79fdc27-f4a9-4773-adc7-bc2a1cb09884 req-00210c9d-a511-4ca8-a8cb-df0036617d2e service nova] Lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.032246] env[63297]: DEBUG nova.compute.manager [req-b79fdc27-f4a9-4773-adc7-bc2a1cb09884 req-00210c9d-a511-4ca8-a8cb-df0036617d2e service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] No waiting events found dispatching network-vif-plugged-6e95dd8f-a3a6-4449-a572-aba4792afffe {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1489.032246] env[63297]: WARNING nova.compute.manager [req-b79fdc27-f4a9-4773-adc7-bc2a1cb09884 req-00210c9d-a511-4ca8-a8cb-df0036617d2e service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received unexpected event network-vif-plugged-6e95dd8f-a3a6-4449-a572-aba4792afffe for instance with vm_state building and task_state spawning. [ 1489.043584] env[63297]: DEBUG nova.virt.block_device [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Updating existing volume attachment record: a573b62b-4f24-4611-8d6a-bf66310eb74b {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1489.107828] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697642, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076436} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.109233] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1489.109417] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f1d0fe-207f-4419-8635-6114ef7b7a7a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.134607] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] ac112251-8cc3-4f57-8983-8a07e2a068f8/ac112251-8cc3-4f57-8983-8a07e2a068f8.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1489.137641] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c9757c0-6443-4c91-94aa-f62125235255 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.160026] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Waiting for the task: (returnval){ [ 1489.160026] env[63297]: value = "task-1697643" [ 1489.160026] env[63297]: _type = "Task" [ 1489.160026] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.170666] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697643, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.213593] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ad0cff-cc27-4a85-9bbc-6a43e473957f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.231752] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bc1532-2357-4a3a-9ecb-6fb17af67eb5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.267694] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b94f4f-ad6c-4bea-a179-3bbf0a6e8ef5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.276922] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea6d553-2a4f-48c1-a762-9711c4ee1181 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.292587] env[63297]: DEBUG nova.compute.provider_tree [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1489.528396] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.528535] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.528683] env[63297]: DEBUG nova.network.neutron [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1489.675088] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697643, 'name': ReconfigVM_Task, 'duration_secs': 0.359066} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.675088] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Reconfigured VM instance instance-00000046 to attach disk [datastore1] ac112251-8cc3-4f57-8983-8a07e2a068f8/ac112251-8cc3-4f57-8983-8a07e2a068f8.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1489.675384] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9739caba-32f5-4199-83d2-4ebc4865a7e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.684509] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Waiting for the task: (returnval){ [ 1489.684509] env[63297]: value = "task-1697647" [ 1489.684509] env[63297]: _type = "Task" [ 1489.684509] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.695868] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697647, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.799425] env[63297]: DEBUG nova.scheduler.client.report [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1489.861572] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "71faf167-dfe3-4792-9841-b5ab4b333884" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.861962] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "71faf167-dfe3-4792-9841-b5ab4b333884" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.862504] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock 
"71faf167-dfe3-4792-9841-b5ab4b333884-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.863116] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "71faf167-dfe3-4792-9841-b5ab4b333884-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.863237] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "71faf167-dfe3-4792-9841-b5ab4b333884-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.866060] env[63297]: INFO nova.compute.manager [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Terminating instance [ 1489.869016] env[63297]: DEBUG nova.compute.manager [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1489.869261] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1489.870155] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3d68ba-ef75-4ca6-a136-d694b4711c6d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.880199] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1489.880199] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c1f5a77-1baa-4a1a-8a6e-50e4bb216723 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.949373] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "b95b7656-70ac-4eaf-9934-4b4c50e78035" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.949373] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 
tempest-MigrationsAdminTest-697190469-project-member] Lock "b95b7656-70ac-4eaf-9934-4b4c50e78035" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.949373] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "b95b7656-70ac-4eaf-9934-4b4c50e78035-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.949373] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "b95b7656-70ac-4eaf-9934-4b4c50e78035-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.949606] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "b95b7656-70ac-4eaf-9934-4b4c50e78035-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.953153] env[63297]: INFO nova.compute.manager [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Terminating instance [ 1489.955245] env[63297]: DEBUG nova.compute.manager [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1489.955450] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1489.956356] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172e9e08-9176-4ef3-9674-8a36c612bebb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.967047] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1489.967598] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24a1730c-bb2e-40df-afb1-756b1876fba2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.976867] env[63297]: DEBUG oslo_vmware.api [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1489.976867] env[63297]: value = "task-1697649" [ 1489.976867] env[63297]: _type = "Task" [ 1489.976867] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.994276] env[63297]: DEBUG oslo_vmware.api [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697649, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.994581] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1489.994758] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1489.994954] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleting the datastore file [datastore1] 71faf167-dfe3-4792-9841-b5ab4b333884 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1489.995267] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a5ec7a92-3915-4078-b78f-6ba169d77bd3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.006021] env[63297]: DEBUG oslo_vmware.api [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1490.006021] env[63297]: value = "task-1697650" [ 1490.006021] env[63297]: _type = "Task" [ 1490.006021] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.013160] env[63297]: DEBUG oslo_vmware.api [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697650, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.093557] env[63297]: DEBUG nova.network.neutron [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1490.204412] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697647, 'name': Rename_Task, 'duration_secs': 0.17098} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.204859] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1490.205042] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d8112cc-ff8d-4dbc-aad4-020f02b2092b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.214151] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Waiting for the task: (returnval){ [ 1490.214151] env[63297]: value = "task-1697651" [ 1490.214151] env[63297]: _type = "Task" [ 1490.214151] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.228317] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697651, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.308897] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.163s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.309154] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.093s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.311046] env[63297]: INFO nova.compute.claims [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1490.329122] env[63297]: INFO nova.scheduler.client.report [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Deleted allocations for instance f429dd9b-be6c-4e90-876b-3a3931fb1c4a [ 1490.331015] env[63297]: DEBUG nova.network.neutron [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updating instance_info_cache with network_info: [{"id": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "address": "fa:16:3e:08:14:f1", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95dd8f-a3", "ovs_interfaceid": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.488647] env[63297]: DEBUG oslo_vmware.api [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697649, 'name': PowerOffVM_Task, 'duration_secs': 0.334301} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.489019] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1490.489257] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1490.489550] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89beeecb-f927-48fc-a11e-36b75357cf1d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.514415] env[63297]: DEBUG oslo_vmware.api [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281931} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.515928] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1490.516144] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1490.516322] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1490.516552] env[63297]: INFO nova.compute.manager [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1490.516771] env[63297]: DEBUG oslo.service.loopingcall [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1490.519289] env[63297]: DEBUG nova.compute.manager [-] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1490.519392] env[63297]: DEBUG nova.network.neutron [-] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1490.521395] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "44f4776e-d4a1-40ad-a03b-bb03582b95bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.521906] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "44f4776e-d4a1-40ad-a03b-bb03582b95bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.598443] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1490.598443] env[63297]: DEBUG 
nova.virt.vmwareapi.vmops [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1490.598443] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Deleting the datastore file [datastore1] b95b7656-70ac-4eaf-9934-4b4c50e78035 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1490.598443] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-874ec293-6c0c-40cf-b8cc-626d798e0138 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.604036] env[63297]: DEBUG oslo_vmware.api [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1490.604036] env[63297]: value = "task-1697653" [ 1490.604036] env[63297]: _type = "Task" [ 1490.604036] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.620623] env[63297]: DEBUG oslo_vmware.api [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.727193] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Acquiring lock "5a868570-7504-4262-80b2-a458c219e689" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.727375] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Lock "5a868570-7504-4262-80b2-a458c219e689" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.736399] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697651, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.835403] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.835403] env[63297]: DEBUG nova.compute.manager [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Instance network_info: |[{"id": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "address": "fa:16:3e:08:14:f1", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95dd8f-a3", "ovs_interfaceid": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1490.836159] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:14:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e95dd8f-a3a6-4449-a572-aba4792afffe', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1490.844954] env[63297]: DEBUG oslo.service.loopingcall [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1490.850742] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1490.851246] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c3bff8b6-d4d9-49b9-adc1-4cb476c08f8c tempest-ListServerFiltersTestJSON-855101693 tempest-ListServerFiltersTestJSON-855101693-project-member] Lock "f429dd9b-be6c-4e90-876b-3a3931fb1c4a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.814s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.852172] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0722fe3-bba3-4fb4-b0cc-6fc74219c408 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.876596] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1490.876596] env[63297]: value = "task-1697654" [ 1490.876596] env[63297]: _type = "Task" [ 1490.876596] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.886118] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697654, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.025491] env[63297]: DEBUG nova.compute.manager [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1491.067564] env[63297]: DEBUG nova.compute.manager [req-9614cfb9-667f-435e-ac1c-d0e9db3fa7ae req-df2f3070-074d-4d97-bfd8-5d0f3184e2e7 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received event network-changed-6e95dd8f-a3a6-4449-a572-aba4792afffe {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1491.068130] env[63297]: DEBUG nova.compute.manager [req-9614cfb9-667f-435e-ac1c-d0e9db3fa7ae req-df2f3070-074d-4d97-bfd8-5d0f3184e2e7 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Refreshing instance network info cache due to event network-changed-6e95dd8f-a3a6-4449-a572-aba4792afffe. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1491.068445] env[63297]: DEBUG oslo_concurrency.lockutils [req-9614cfb9-667f-435e-ac1c-d0e9db3fa7ae req-df2f3070-074d-4d97-bfd8-5d0f3184e2e7 service nova] Acquiring lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.068691] env[63297]: DEBUG oslo_concurrency.lockutils [req-9614cfb9-667f-435e-ac1c-d0e9db3fa7ae req-df2f3070-074d-4d97-bfd8-5d0f3184e2e7 service nova] Acquired lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.068828] env[63297]: DEBUG nova.network.neutron [req-9614cfb9-667f-435e-ac1c-d0e9db3fa7ae req-df2f3070-074d-4d97-bfd8-5d0f3184e2e7 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Refreshing network info cache for port 6e95dd8f-a3a6-4449-a572-aba4792afffe {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1491.118361] env[63297]: DEBUG oslo_vmware.api [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171103} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.121018] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1491.121018] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1491.121018] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1491.121018] env[63297]: INFO nova.compute.manager [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1491.121018] env[63297]: DEBUG oslo.service.loopingcall [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1491.121018] env[63297]: DEBUG nova.compute.manager [-] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1491.121018] env[63297]: DEBUG nova.network.neutron [-] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1491.232021] env[63297]: DEBUG oslo_vmware.api [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697651, 'name': PowerOnVM_Task, 'duration_secs': 0.549937} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.232021] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1491.232021] env[63297]: INFO nova.compute.manager [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Took 7.78 seconds to spawn the instance on the hypervisor. [ 1491.232021] env[63297]: DEBUG nova.compute.manager [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1491.232021] env[63297]: DEBUG nova.compute.manager [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1491.233519] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b730a5-6947-480a-a6eb-ce6964a6d975 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.391665] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697654, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.508604] env[63297]: DEBUG nova.network.neutron [-] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.557199] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.756657] env[63297]: INFO nova.compute.manager [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Took 64.20 seconds to build instance. [ 1491.769623] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.893402] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697654, 'name': CreateVM_Task, 'duration_secs': 0.676843} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.893617] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1491.894367] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.894493] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.894907] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1491.897630] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38398aec-8c83-4d82-8c23-221f86d80ba4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.907216] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea 
tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1491.907216] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b78c1f-c88e-a7a0-56a4-9f7456d5e12e" [ 1491.907216] env[63297]: _type = "Task" [ 1491.907216] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.915914] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b78c1f-c88e-a7a0-56a4-9f7456d5e12e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.918206] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24c7cbc-3c7d-411c-bd1c-bb0ffd8efbb9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.926574] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b3c4db-29b9-4cd8-9509-d4ed3804c501 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.962375] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624bb6dc-0b62-4fdd-9050-728ed234d559 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.970098] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6be83f1-9bd8-4f22-85e1-7fa5161340f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.984853] env[63297]: DEBUG nova.compute.provider_tree [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1492.013637] env[63297]: INFO nova.compute.manager [-] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Took 1.49 seconds to deallocate network for instance. [ 1492.033793] env[63297]: DEBUG nova.network.neutron [-] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.057308] env[63297]: DEBUG nova.network.neutron [req-9614cfb9-667f-435e-ac1c-d0e9db3fa7ae req-df2f3070-074d-4d97-bfd8-5d0f3184e2e7 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updated VIF entry in instance network info cache for port 6e95dd8f-a3a6-4449-a572-aba4792afffe. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1492.057308] env[63297]: DEBUG nova.network.neutron [req-9614cfb9-667f-435e-ac1c-d0e9db3fa7ae req-df2f3070-074d-4d97-bfd8-5d0f3184e2e7 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updating instance_info_cache with network_info: [{"id": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "address": "fa:16:3e:08:14:f1", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95dd8f-a3", "ovs_interfaceid": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.259792] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13b88598-f0b3-4e14-b581-ff717705a56b tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Lock "ac112251-8cc3-4f57-8983-8a07e2a068f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.712s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.419188] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b78c1f-c88e-a7a0-56a4-9f7456d5e12e, 'name': SearchDatastore_Task, 'duration_secs': 0.010725} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.420408] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.420408] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1492.420408] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.420408] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.420408] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1492.422018] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5830dc0c-8090-45e6-82a5-c29154cd2525 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.432773] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1492.432998] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1492.433948] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6efb5f99-4c2c-445e-839a-4109e42631ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.441487] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1492.441487] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b8cba5-80da-cc42-2eeb-df66904929ca" [ 1492.441487] env[63297]: _type = "Task" [ 1492.441487] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.451357] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b8cba5-80da-cc42-2eeb-df66904929ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.488598] env[63297]: DEBUG nova.scheduler.client.report [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1492.520143] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.540724] env[63297]: INFO nova.compute.manager [-] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Took 1.42 seconds to deallocate network for instance. 
[ 1492.560237] env[63297]: DEBUG oslo_concurrency.lockutils [req-9614cfb9-667f-435e-ac1c-d0e9db3fa7ae req-df2f3070-074d-4d97-bfd8-5d0f3184e2e7 service nova] Releasing lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.560237] env[63297]: DEBUG nova.compute.manager [req-9614cfb9-667f-435e-ac1c-d0e9db3fa7ae req-df2f3070-074d-4d97-bfd8-5d0f3184e2e7 service nova] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Received event network-vif-deleted-fb29440a-5455-4b45-b672-3aa307f31cf0 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1492.562252] env[63297]: INFO nova.compute.manager [req-9614cfb9-667f-435e-ac1c-d0e9db3fa7ae req-df2f3070-074d-4d97-bfd8-5d0f3184e2e7 service nova] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Neutron deleted interface fb29440a-5455-4b45-b672-3aa307f31cf0; detaching it from the instance and deleting it from the info cache [ 1492.562252] env[63297]: DEBUG nova.network.neutron [req-9614cfb9-667f-435e-ac1c-d0e9db3fa7ae req-df2f3070-074d-4d97-bfd8-5d0f3184e2e7 service nova] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.954250] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b8cba5-80da-cc42-2eeb-df66904929ca, 'name': SearchDatastore_Task, 'duration_secs': 0.020831} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.955180] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7205ef7a-5845-4ea2-af4a-3d930647a735 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.962701] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1492.962701] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d493a-c727-d3fd-a04c-28ad33bd621d" [ 1492.962701] env[63297]: _type = "Task" [ 1492.962701] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.974996] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d493a-c727-d3fd-a04c-28ad33bd621d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.994650] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.995269] env[63297]: DEBUG nova.compute.manager [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1492.998743] env[63297]: DEBUG oslo_concurrency.lockutils [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.313s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.998986] env[63297]: DEBUG nova.objects.instance [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lazy-loading 'resources' on Instance uuid 708d1907-1619-4aa4-b0b3-ae58f046a760 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1493.047581] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.063807] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-609a550d-8d1d-4d14-adba-dac7fdb97832 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.076537] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f948e27c-f6f0-4917-a33c-f4b11be3f275 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.105585] env[63297]: DEBUG nova.compute.manager [req-a291ae3b-2260-49d9-a9a5-8eadedb804f0 req-7c9d4ec4-25ac-4d49-a0ed-0143fa89b147 service nova] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Received event network-vif-deleted-d9a7e1f4-1aaf-4c39-92ba-c79e986fc2fb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1493.105814] env[63297]: DEBUG nova.compute.manager [req-a291ae3b-2260-49d9-a9a5-8eadedb804f0 req-7c9d4ec4-25ac-4d49-a0ed-0143fa89b147 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Received event network-changed-20681722-92b3-46f4-bd82-1775db48a289 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1493.106032] env[63297]: DEBUG nova.compute.manager [req-a291ae3b-2260-49d9-a9a5-8eadedb804f0 req-7c9d4ec4-25ac-4d49-a0ed-0143fa89b147 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Refreshing 
instance network info cache due to event network-changed-20681722-92b3-46f4-bd82-1775db48a289. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1493.106278] env[63297]: DEBUG oslo_concurrency.lockutils [req-a291ae3b-2260-49d9-a9a5-8eadedb804f0 req-7c9d4ec4-25ac-4d49-a0ed-0143fa89b147 service nova] Acquiring lock "refresh_cache-ac112251-8cc3-4f57-8983-8a07e2a068f8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.106416] env[63297]: DEBUG oslo_concurrency.lockutils [req-a291ae3b-2260-49d9-a9a5-8eadedb804f0 req-7c9d4ec4-25ac-4d49-a0ed-0143fa89b147 service nova] Acquired lock "refresh_cache-ac112251-8cc3-4f57-8983-8a07e2a068f8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.106579] env[63297]: DEBUG nova.network.neutron [req-a291ae3b-2260-49d9-a9a5-8eadedb804f0 req-7c9d4ec4-25ac-4d49-a0ed-0143fa89b147 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Refreshing network info cache for port 20681722-92b3-46f4-bd82-1775db48a289 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1493.121726] env[63297]: DEBUG nova.compute.manager [req-9614cfb9-667f-435e-ac1c-d0e9db3fa7ae req-df2f3070-074d-4d97-bfd8-5d0f3184e2e7 service nova] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Detach interface failed, port_id=fb29440a-5455-4b45-b672-3aa307f31cf0, reason: Instance 71faf167-dfe3-4792-9841-b5ab4b333884 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1493.369246] env[63297]: DEBUG nova.network.neutron [req-a291ae3b-2260-49d9-a9a5-8eadedb804f0 req-7c9d4ec4-25ac-4d49-a0ed-0143fa89b147 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Updated VIF entry in instance network info cache for port 20681722-92b3-46f4-bd82-1775db48a289. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1493.369656] env[63297]: DEBUG nova.network.neutron [req-a291ae3b-2260-49d9-a9a5-8eadedb804f0 req-7c9d4ec4-25ac-4d49-a0ed-0143fa89b147 service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Updating instance_info_cache with network_info: [{"id": "20681722-92b3-46f4-bd82-1775db48a289", "address": "fa:16:3e:fd:f0:7d", "network": {"id": "2305a114-386a-45f6-9253-caa7ef7bb250", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1353388897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d4ff8789e0a48048ce6ca0f9503c05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20681722-92", "ovs_interfaceid": "20681722-92b3-46f4-bd82-1775db48a289", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.475415] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d493a-c727-d3fd-a04c-28ad33bd621d, 'name': SearchDatastore_Task, 'duration_secs': 0.0282} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.475415] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.475625] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 66b7a1e5-5e74-49db-99f3-4427d7297bf2/66b7a1e5-5e74-49db-99f3-4427d7297bf2.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1493.475992] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbb0aa55-7534-471f-b399-40f74330072c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.485520] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1493.485520] env[63297]: value = "task-1697656" [ 1493.485520] env[63297]: _type = "Task" [ 1493.485520] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.497440] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697656, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.504446] env[63297]: DEBUG nova.compute.utils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1493.506302] env[63297]: DEBUG nova.compute.manager [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1493.506302] env[63297]: DEBUG nova.network.neutron [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1493.568225] env[63297]: DEBUG nova.policy [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0989a6e1e644ec1b4a179a58be8b946', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d9f790a174e4b7ea1c5646c3b7be873', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1493.605457] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Volume attach. Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1493.605698] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353923', 'volume_id': '3b3e5994-2524-4c40-a237-0f11786f09c9', 'name': 'volume-3b3e5994-2524-4c40-a237-0f11786f09c9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'efaa465d-f6b2-4891-8e96-b4c3af052759', 'attached_at': '', 'detached_at': '', 'volume_id': '3b3e5994-2524-4c40-a237-0f11786f09c9', 'serial': '3b3e5994-2524-4c40-a237-0f11786f09c9'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1493.606691] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18155690-b627-4276-89c3-e221c0f544e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.628891] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc6b0c8-8842-43a4-917b-c61164751fd2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.657861] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] volume-3b3e5994-2524-4c40-a237-0f11786f09c9/volume-3b3e5994-2524-4c40-a237-0f11786f09c9.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1493.661556] env[63297]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80db8a48-9aed-4e67-8ba7-5f21b4bea1f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.692248] env[63297]: DEBUG oslo_vmware.api [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1493.692248] env[63297]: value = "task-1697657" [ 1493.692248] env[63297]: _type = "Task" [ 1493.692248] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.707748] env[63297]: DEBUG oslo_vmware.api [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697657, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.875476] env[63297]: DEBUG oslo_concurrency.lockutils [req-a291ae3b-2260-49d9-a9a5-8eadedb804f0 req-7c9d4ec4-25ac-4d49-a0ed-0143fa89b147 service nova] Releasing lock "refresh_cache-ac112251-8cc3-4f57-8983-8a07e2a068f8" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.002781] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697656, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.011843] env[63297]: DEBUG nova.compute.manager [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1494.067954] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2b93c0-864f-434b-bf06-36eda299876f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.082185] env[63297]: DEBUG nova.network.neutron [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Successfully created port: bf9b1829-ba35-499e-993f-44fbd669974d {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1494.086870] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d4ee55-c752-4855-acb8-c3388f5e76af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.131169] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642a2033-c009-485e-8f73-f6d3b133092f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.144178] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfdf739-58ad-43db-bf70-fdba6e92ae82 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.162017] env[63297]: DEBUG nova.compute.provider_tree [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1494.186084] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "5124f7fb-1293-4964-98c4-426ecfce7d10" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.186383] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.186958] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "5124f7fb-1293-4964-98c4-426ecfce7d10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.186958] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock 
"5124f7fb-1293-4964-98c4-426ecfce7d10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.186958] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.189509] env[63297]: INFO nova.compute.manager [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Terminating instance [ 1494.192488] env[63297]: DEBUG nova.compute.manager [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1494.192694] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1494.193572] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a77559-3f7e-4c5b-bbce-f7a114743117 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.207571] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1494.211637] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f06d6bd-9a30-42b5-94f0-336c4b41a779 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.213848] env[63297]: DEBUG oslo_vmware.api [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697657, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.222337] env[63297]: DEBUG oslo_vmware.api [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1494.222337] env[63297]: value = "task-1697658" [ 1494.222337] env[63297]: _type = "Task" [ 1494.222337] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.230991] env[63297]: DEBUG oslo_vmware.api [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697658, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.497009] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697656, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713861} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.497309] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 66b7a1e5-5e74-49db-99f3-4427d7297bf2/66b7a1e5-5e74-49db-99f3-4427d7297bf2.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1494.497540] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1494.498066] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c539bb0-8b73-4d17-af86-f87db028ad9f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.506618] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1494.506618] env[63297]: value = "task-1697659" [ 1494.506618] env[63297]: _type = "Task" [ 1494.506618] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.520502] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697659, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.665866] env[63297]: DEBUG nova.scheduler.client.report [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1494.709149] env[63297]: DEBUG oslo_vmware.api [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697657, 'name': ReconfigVM_Task, 'duration_secs': 0.715304} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.709505] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Reconfigured VM instance instance-00000045 to attach disk [datastore1] volume-3b3e5994-2524-4c40-a237-0f11786f09c9/volume-3b3e5994-2524-4c40-a237-0f11786f09c9.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1494.714940] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73a577bf-30c2-4145-a50d-17176923d886 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.737470] env[63297]: DEBUG oslo_vmware.api [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697658, 'name': PowerOffVM_Task, 'duration_secs': 0.244124} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.739147] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1494.739229] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1494.739580] env[63297]: DEBUG oslo_vmware.api [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1494.739580] env[63297]: value = "task-1697660" [ 1494.739580] env[63297]: _type = "Task" [ 1494.739580] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.739778] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b322b96-1b9f-4a9b-9d28-69a99fda3178 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.752400] env[63297]: DEBUG oslo_vmware.api [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697660, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.857378] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1494.857852] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1494.858247] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Deleting the datastore file [datastore1] 5124f7fb-1293-4964-98c4-426ecfce7d10 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1494.858695] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2521bbfb-48c6-432d-92e5-ea0561446a7e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.868116] env[63297]: DEBUG oslo_vmware.api [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1494.868116] env[63297]: value = "task-1697662" [ 1494.868116] env[63297]: _type = "Task" [ 1494.868116] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.884616] env[63297]: DEBUG oslo_vmware.api [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697662, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.016658] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697659, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086664} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.016918] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1495.017760] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0268427b-d2b3-42ec-8162-d12ffe01db83 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.023807] env[63297]: DEBUG nova.compute.manager [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1495.048226] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 66b7a1e5-5e74-49db-99f3-4427d7297bf2/66b7a1e5-5e74-49db-99f3-4427d7297bf2.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1495.048554] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2981c0cb-31a3-48ed-bf88-fa81e2e136b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.074467] env[63297]: DEBUG nova.virt.hardware [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1495.074750] env[63297]: DEBUG nova.virt.hardware [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1495.074911] env[63297]: DEBUG nova.virt.hardware [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] 
Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1495.075190] env[63297]: DEBUG nova.virt.hardware [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1495.075385] env[63297]: DEBUG nova.virt.hardware [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1495.075567] env[63297]: DEBUG nova.virt.hardware [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1495.075834] env[63297]: DEBUG nova.virt.hardware [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1495.076135] env[63297]: DEBUG nova.virt.hardware [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1495.076322] env[63297]: DEBUG nova.virt.hardware [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1495.076537] env[63297]: DEBUG nova.virt.hardware [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1495.076753] env[63297]: DEBUG nova.virt.hardware [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1495.077801] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3c15d6-4dfc-42ee-97a4-f9c7d13a2674 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.081956] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1495.081956] env[63297]: value = "task-1697663" [ 1495.081956] env[63297]: _type = "Task" [ 1495.081956] 
env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.092681] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed6c076-d268-4684-9966-129d647dd52b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.098049] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697663, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.172478] env[63297]: DEBUG oslo_concurrency.lockutils [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.173s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.175665] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.859s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.176018] env[63297]: DEBUG nova.objects.instance [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lazy-loading 'resources' on Instance uuid c05a249e-ab88-41f0-81f5-b644b3da5d2d {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1495.196725] env[63297]: INFO nova.scheduler.client.report [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted allocations for instance 708d1907-1619-4aa4-b0b3-ae58f046a760 [ 1495.253360] env[63297]: DEBUG oslo_vmware.api [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697660, 'name': ReconfigVM_Task, 'duration_secs': 0.165064} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.254347] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353923', 'volume_id': '3b3e5994-2524-4c40-a237-0f11786f09c9', 'name': 'volume-3b3e5994-2524-4c40-a237-0f11786f09c9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'efaa465d-f6b2-4891-8e96-b4c3af052759', 'attached_at': '', 'detached_at': '', 'volume_id': '3b3e5994-2524-4c40-a237-0f11786f09c9', 'serial': '3b3e5994-2524-4c40-a237-0f11786f09c9'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1495.378658] env[63297]: DEBUG oslo_vmware.api [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697662, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30963} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.378894] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1495.379090] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1495.379268] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1495.379440] env[63297]: INFO nova.compute.manager [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1495.379678] env[63297]: DEBUG oslo.service.loopingcall [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.379873] env[63297]: DEBUG nova.compute.manager [-] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1495.379967] env[63297]: DEBUG nova.network.neutron [-] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1495.594302] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697663, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.711093] env[63297]: DEBUG oslo_concurrency.lockutils [None req-beb540a1-c608-4205-9e44-9243fe9f07e4 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "708d1907-1619-4aa4-b0b3-ae58f046a760" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.215s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.869131] env[63297]: DEBUG nova.compute.manager [req-1e4e549e-d1e0-4704-84ad-28dc5c666fdb req-f49c2c74-a1bd-44d0-8f2d-67a1e92db801 service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Received event network-vif-plugged-bf9b1829-ba35-499e-993f-44fbd669974d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1495.869131] env[63297]: DEBUG oslo_concurrency.lockutils [req-1e4e549e-d1e0-4704-84ad-28dc5c666fdb req-f49c2c74-a1bd-44d0-8f2d-67a1e92db801 service nova] Acquiring lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.869131] env[63297]: DEBUG oslo_concurrency.lockutils [req-1e4e549e-d1e0-4704-84ad-28dc5c666fdb req-f49c2c74-a1bd-44d0-8f2d-67a1e92db801 service nova] Lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.869131] env[63297]: DEBUG oslo_concurrency.lockutils [req-1e4e549e-d1e0-4704-84ad-28dc5c666fdb req-f49c2c74-a1bd-44d0-8f2d-67a1e92db801 service nova] Lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.869131] env[63297]: DEBUG nova.compute.manager [req-1e4e549e-d1e0-4704-84ad-28dc5c666fdb req-f49c2c74-a1bd-44d0-8f2d-67a1e92db801 service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] No waiting events found dispatching network-vif-plugged-bf9b1829-ba35-499e-993f-44fbd669974d {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1495.869131] env[63297]: WARNING nova.compute.manager [req-1e4e549e-d1e0-4704-84ad-28dc5c666fdb req-f49c2c74-a1bd-44d0-8f2d-67a1e92db801 service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Received unexpected event 
network-vif-plugged-bf9b1829-ba35-499e-993f-44fbd669974d for instance with vm_state building and task_state spawning. [ 1496.039259] env[63297]: DEBUG nova.network.neutron [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Successfully updated port: bf9b1829-ba35-499e-993f-44fbd669974d {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1496.098188] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697663, 'name': ReconfigVM_Task, 'duration_secs': 0.907833} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.101488] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 66b7a1e5-5e74-49db-99f3-4427d7297bf2/66b7a1e5-5e74-49db-99f3-4427d7297bf2.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1496.102469] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a4f6b01-b758-4003-a50b-3b996b90283e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.109493] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1496.109493] env[63297]: value = "task-1697664" [ 1496.109493] env[63297]: _type = "Task" [ 1496.109493] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.122510] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697664, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.192313] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5660b2e-d6b1-4599-bbf5-c6d0f3ea82e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.199295] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda77a7d-f5a0-4090-a402-5a0f019e5efe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.239884] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec2eb80-b28a-45b6-9e92-f0d0a709f670 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.244317] env[63297]: DEBUG nova.compute.manager [req-38c4d565-66fc-47be-ac26-088e695cf686 req-b5ec8c37-3dad-41ec-9626-311b9ceec6dc service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Received event network-vif-deleted-1b123801-2747-40a9-84bc-ae5dc9595556 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1496.244462] env[63297]: INFO nova.compute.manager [req-38c4d565-66fc-47be-ac26-088e695cf686 req-b5ec8c37-3dad-41ec-9626-311b9ceec6dc service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Neutron deleted interface 1b123801-2747-40a9-84bc-ae5dc9595556; detaching it from the instance and deleting it from the info cache [ 1496.244634] env[63297]: DEBUG nova.network.neutron [req-38c4d565-66fc-47be-ac26-088e695cf686 req-b5ec8c37-3dad-41ec-9626-311b9ceec6dc service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.254188] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7066fea-3e1b-4a89-879b-fb29d893f8df {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.271866] env[63297]: DEBUG nova.compute.provider_tree [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1496.311168] env[63297]: DEBUG nova.objects.instance [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lazy-loading 'flavor' on Instance uuid efaa465d-f6b2-4891-8e96-b4c3af052759 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1496.469752] env[63297]: DEBUG nova.network.neutron [-] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.543019] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Acquiring lock "refresh_cache-10def566-2d1f-4ea2-9df5-ebf4d77f7b48" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.543196] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Acquired lock "refresh_cache-10def566-2d1f-4ea2-9df5-ebf4d77f7b48" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.543539] env[63297]: DEBUG nova.network.neutron [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1496.620955] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697664, 'name': Rename_Task, 'duration_secs': 0.347169} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.621196] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1496.621342] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f09f471c-b6c4-4c90-9698-641aba2f8a48 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.629085] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1496.629085] env[63297]: value = "task-1697665" [ 1496.629085] env[63297]: _type = "Task" [ 1496.629085] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.637692] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697665, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.736114] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "5cdb44c7-3dc1-4bce-8864-a1a40150e730" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.736588] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "5cdb44c7-3dc1-4bce-8864-a1a40150e730" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.747783] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-013f8348-8eb4-4df2-a312-136f5a644b10 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.759971] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91baa6c-d689-4cc3-8dae-94089ec9032a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.776695] env[63297]: DEBUG nova.scheduler.client.report [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1496.798291] env[63297]: DEBUG nova.compute.manager [req-38c4d565-66fc-47be-ac26-088e695cf686 req-b5ec8c37-3dad-41ec-9626-311b9ceec6dc service nova] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Detach interface failed, port_id=1b123801-2747-40a9-84bc-ae5dc9595556, reason: Instance 5124f7fb-1293-4964-98c4-426ecfce7d10 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1496.815615] env[63297]: DEBUG oslo_concurrency.lockutils [None req-53c0deda-f75e-463f-b2d3-c92acba5e97e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "efaa465d-f6b2-4891-8e96-b4c3af052759" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.864s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.973110] env[63297]: INFO nova.compute.manager [-] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Took 1.59 seconds to deallocate network for instance. 
[ 1497.098601] env[63297]: DEBUG nova.network.neutron [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1497.140937] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697665, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.243837] env[63297]: DEBUG nova.compute.manager [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1497.282550] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.108s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.285186] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.351s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.287317] env[63297]: INFO nova.compute.claims [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1497.317623] env[63297]: INFO nova.scheduler.client.report [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Deleted allocations for instance c05a249e-ab88-41f0-81f5-b644b3da5d2d [ 1497.466837] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "efaa465d-f6b2-4891-8e96-b4c3af052759" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.469384] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "efaa465d-f6b2-4891-8e96-b4c3af052759" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.469384] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "efaa465d-f6b2-4891-8e96-b4c3af052759-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.469384] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "efaa465d-f6b2-4891-8e96-b4c3af052759-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.469384] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "efaa465d-f6b2-4891-8e96-b4c3af052759-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.471518] env[63297]: INFO nova.compute.manager [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Terminating instance [ 1497.474250] env[63297]: DEBUG nova.compute.manager [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1497.474688] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1497.475130] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0f695cd-b494-4423-af6e-97e632813807 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.482485] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.484891] env[63297]: DEBUG oslo_vmware.api [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1497.484891] env[63297]: value = "task-1697666" [ 1497.484891] env[63297]: _type = "Task" [ 1497.484891] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.495405] env[63297]: DEBUG oslo_vmware.api [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697666, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.591703] env[63297]: DEBUG nova.network.neutron [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Updating instance_info_cache with network_info: [{"id": "bf9b1829-ba35-499e-993f-44fbd669974d", "address": "fa:16:3e:e2:48:de", "network": {"id": "5b91ea3f-f213-4700-80d0-18d8bfab4cf1", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1365683862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d9f790a174e4b7ea1c5646c3b7be873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf9b1829-ba", "ovs_interfaceid": "bf9b1829-ba35-499e-993f-44fbd669974d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.641654] env[63297]: DEBUG oslo_vmware.api [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1697665, 'name': PowerOnVM_Task, 'duration_secs': 0.906831} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.642203] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1497.642318] env[63297]: INFO nova.compute.manager [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Took 9.54 seconds to spawn the instance on the hypervisor. 
[ 1497.642504] env[63297]: DEBUG nova.compute.manager [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1497.643584] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df78f8f1-7ad2-400b-9e67-bc413201c39a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.761079] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.827243] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdd7e678-5d50-4f12-98bd-dc0e223d7263 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "c05a249e-ab88-41f0-81f5-b644b3da5d2d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.645s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.894689] env[63297]: DEBUG nova.compute.manager [req-2cb463bf-8688-4f27-818b-226adaa1b832 req-12d63b69-e94d-4d7b-b4fd-d2bd7d11bf58 service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Received event network-changed-bf9b1829-ba35-499e-993f-44fbd669974d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1497.894887] env[63297]: DEBUG nova.compute.manager [req-2cb463bf-8688-4f27-818b-226adaa1b832 req-12d63b69-e94d-4d7b-b4fd-d2bd7d11bf58 service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Refreshing instance network info cache due to event network-changed-bf9b1829-ba35-499e-993f-44fbd669974d. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1497.895086] env[63297]: DEBUG oslo_concurrency.lockutils [req-2cb463bf-8688-4f27-818b-226adaa1b832 req-12d63b69-e94d-4d7b-b4fd-d2bd7d11bf58 service nova] Acquiring lock "refresh_cache-10def566-2d1f-4ea2-9df5-ebf4d77f7b48" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.994958] env[63297]: DEBUG oslo_vmware.api [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697666, 'name': PowerOffVM_Task, 'duration_secs': 0.250208} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.995243] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1497.995439] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Volume detach. Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1497.995630] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353923', 'volume_id': '3b3e5994-2524-4c40-a237-0f11786f09c9', 'name': 'volume-3b3e5994-2524-4c40-a237-0f11786f09c9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'efaa465d-f6b2-4891-8e96-b4c3af052759', 'attached_at': '', 'detached_at': '', 'volume_id': '3b3e5994-2524-4c40-a237-0f11786f09c9', 'serial': '3b3e5994-2524-4c40-a237-0f11786f09c9'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1497.996405] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223d3dfe-4397-4711-88c4-092750b07999 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.018624] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9cb351-ba37-4d19-8b26-fff1d113a518 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.026156] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf42506-8c13-4206-85a2-fb0af83f2333 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.047180] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea80082c-cf7e-42e5-998a-7bf4d70b08b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.063441] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] The volume has not been displaced from its original location: [datastore1] volume-3b3e5994-2524-4c40-a237-0f11786f09c9/volume-3b3e5994-2524-4c40-a237-0f11786f09c9.vmdk. No consolidation needed. 
{{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1498.068859] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Reconfiguring VM instance instance-00000045 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1498.069203] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13c03b24-1bd6-4902-9b92-b45b0c86b13c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.086528] env[63297]: DEBUG oslo_vmware.api [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1498.086528] env[63297]: value = "task-1697667" [ 1498.086528] env[63297]: _type = "Task" [ 1498.086528] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.094176] env[63297]: DEBUG oslo_vmware.api [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697667, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.094605] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Releasing lock "refresh_cache-10def566-2d1f-4ea2-9df5-ebf4d77f7b48" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.094892] env[63297]: DEBUG nova.compute.manager [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Instance network_info: |[{"id": "bf9b1829-ba35-499e-993f-44fbd669974d", "address": "fa:16:3e:e2:48:de", "network": {"id": "5b91ea3f-f213-4700-80d0-18d8bfab4cf1", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1365683862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d9f790a174e4b7ea1c5646c3b7be873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf9b1829-ba", "ovs_interfaceid": "bf9b1829-ba35-499e-993f-44fbd669974d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1498.095167] 
env[63297]: DEBUG oslo_concurrency.lockutils [req-2cb463bf-8688-4f27-818b-226adaa1b832 req-12d63b69-e94d-4d7b-b4fd-d2bd7d11bf58 service nova] Acquired lock "refresh_cache-10def566-2d1f-4ea2-9df5-ebf4d77f7b48" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.095347] env[63297]: DEBUG nova.network.neutron [req-2cb463bf-8688-4f27-818b-226adaa1b832 req-12d63b69-e94d-4d7b-b4fd-d2bd7d11bf58 service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Refreshing network info cache for port bf9b1829-ba35-499e-993f-44fbd669974d {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1498.096541] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:48:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf9b1829-ba35-499e-993f-44fbd669974d', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1498.103804] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Creating folder: Project (1d9f790a174e4b7ea1c5646c3b7be873). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1498.104291] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c13253f-e1d5-4212-bc2c-d9d20074211a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.115601] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Created folder: Project (1d9f790a174e4b7ea1c5646c3b7be873) in parent group-v353718. [ 1498.115792] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Creating folder: Instances. Parent ref: group-v353925. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1498.116028] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7472b63-2a36-481c-ab87-ab6931cb44e4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.126098] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Created folder: Instances in parent group-v353925. [ 1498.126344] env[63297]: DEBUG oslo.service.loopingcall [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1498.126536] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1498.126738] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4d7f259-5e6d-45aa-9872-017af56c21a9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.146507] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1498.146507] env[63297]: value = "task-1697670" [ 1498.146507] env[63297]: _type = "Task" [ 1498.146507] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.158427] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697670, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.160615] env[63297]: INFO nova.compute.manager [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Took 61.85 seconds to build instance. [ 1498.599852] env[63297]: DEBUG oslo_vmware.api [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697667, 'name': ReconfigVM_Task, 'duration_secs': 0.241331} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.600655] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Reconfigured VM instance instance-00000045 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1498.612069] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44b87e62-ad30-4306-8a5d-199041aa6b75 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.633551] env[63297]: DEBUG oslo_vmware.api [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1498.633551] env[63297]: value = "task-1697672" [ 1498.633551] env[63297]: _type = "Task" [ 1498.633551] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.643031] env[63297]: DEBUG oslo_vmware.api [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697672, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.658501] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697670, 'name': CreateVM_Task, 'duration_secs': 0.413702} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.661256] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1498.662495] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.662495] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.662935] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1498.663201] env[63297]: DEBUG oslo_concurrency.lockutils [None req-99a57d83-6a8e-424c-b15b-675985f639ea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.364s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.663449] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93263803-b6b0-4f1f-889f-f2e2643e2e4d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.668979] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Waiting for the task: (returnval){ [ 1498.668979] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cc18ef-68dd-6c07-2444-dd1b8c510497" [ 1498.668979] env[63297]: _type = "Task" [ 1498.668979] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.677123] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cc18ef-68dd-6c07-2444-dd1b8c510497, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.843711] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23966024-11e9-42dc-9ba5-748b012e69b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.851604] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d860f840-edc4-412e-a528-a8a8ecb706cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.886842] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c16c2b8-4e44-42bf-b64d-74e3ee7f52d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.895507] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c2f608-10cb-49ec-9ea5-eaa23ee37c89 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.909687] env[63297]: DEBUG nova.compute.provider_tree [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1499.025497] env[63297]: DEBUG nova.network.neutron [req-2cb463bf-8688-4f27-818b-226adaa1b832 req-12d63b69-e94d-4d7b-b4fd-d2bd7d11bf58 service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Updated VIF entry in instance network info cache for port bf9b1829-ba35-499e-993f-44fbd669974d. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1499.025860] env[63297]: DEBUG nova.network.neutron [req-2cb463bf-8688-4f27-818b-226adaa1b832 req-12d63b69-e94d-4d7b-b4fd-d2bd7d11bf58 service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Updating instance_info_cache with network_info: [{"id": "bf9b1829-ba35-499e-993f-44fbd669974d", "address": "fa:16:3e:e2:48:de", "network": {"id": "5b91ea3f-f213-4700-80d0-18d8bfab4cf1", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1365683862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d9f790a174e4b7ea1c5646c3b7be873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf9b1829-ba", "ovs_interfaceid": "bf9b1829-ba35-499e-993f-44fbd669974d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.142881] env[63297]: DEBUG oslo_vmware.api [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697672, 'name': ReconfigVM_Task, 'duration_secs': 0.237986} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.143033] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353923', 'volume_id': '3b3e5994-2524-4c40-a237-0f11786f09c9', 'name': 'volume-3b3e5994-2524-4c40-a237-0f11786f09c9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'efaa465d-f6b2-4891-8e96-b4c3af052759', 'attached_at': '', 'detached_at': '', 'volume_id': '3b3e5994-2524-4c40-a237-0f11786f09c9', 'serial': '3b3e5994-2524-4c40-a237-0f11786f09c9'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1499.143297] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1499.144857] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65d8346-af8b-40fc-8cac-eb96fcd1b9f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.152443] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1499.152710] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4652058-fe92-458a-b8fe-66ac0198d07d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.178898] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cc18ef-68dd-6c07-2444-dd1b8c510497, 'name': SearchDatastore_Task, 'duration_secs': 0.01165} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.179261] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.179495] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1499.179726] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.179871] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.180070] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1499.180618] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40653352-a358-4e3a-9c66-ab4a0bc4473f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.196281] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1499.196281] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1499.198429] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-392f53db-db0c-409b-840e-f35113f3dfe8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.203887] env[63297]: DEBUG nova.compute.manager [req-6e526750-1eda-4c93-87d6-83c67f779cdc req-0a04535c-c015-4dad-8707-33adab77fd54 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received event network-changed-6e95dd8f-a3a6-4449-a572-aba4792afffe {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1499.203887] env[63297]: DEBUG nova.compute.manager [req-6e526750-1eda-4c93-87d6-83c67f779cdc req-0a04535c-c015-4dad-8707-33adab77fd54 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Refreshing instance network info cache due to event network-changed-6e95dd8f-a3a6-4449-a572-aba4792afffe. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1499.204066] env[63297]: DEBUG oslo_concurrency.lockutils [req-6e526750-1eda-4c93-87d6-83c67f779cdc req-0a04535c-c015-4dad-8707-33adab77fd54 service nova] Acquiring lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.204104] env[63297]: DEBUG oslo_concurrency.lockutils [req-6e526750-1eda-4c93-87d6-83c67f779cdc req-0a04535c-c015-4dad-8707-33adab77fd54 service nova] Acquired lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.204260] env[63297]: DEBUG nova.network.neutron [req-6e526750-1eda-4c93-87d6-83c67f779cdc req-0a04535c-c015-4dad-8707-33adab77fd54 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Refreshing network info cache for port 6e95dd8f-a3a6-4449-a572-aba4792afffe {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1499.213027] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Waiting for the task: (returnval){ [ 1499.213027] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520ac564-0cc4-f104-08db-9b586468ef94" [ 1499.213027] env[63297]: _type = "Task" [ 1499.213027] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.223255] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520ac564-0cc4-f104-08db-9b586468ef94, 'name': SearchDatastore_Task, 'duration_secs': 0.010405} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.225158] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93933edf-bfc6-473d-ae60-58f16d15f73d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.230961] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1499.231187] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1499.231363] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleting the datastore file [datastore1] efaa465d-f6b2-4891-8e96-b4c3af052759 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1499.232014] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a6ab60f-4389-4035-af0e-329c53585743 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.236058] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Waiting for the task: (returnval){ [ 1499.236058] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c28f40-2dce-0517-f33d-1a5aec501f98" [ 1499.236058] env[63297]: _type = "Task" [ 1499.236058] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.240580] env[63297]: DEBUG oslo_vmware.api [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1499.240580] env[63297]: value = "task-1697674" [ 1499.240580] env[63297]: _type = "Task" [ 1499.240580] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.251737] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c28f40-2dce-0517-f33d-1a5aec501f98, 'name': SearchDatastore_Task, 'duration_secs': 0.010789} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.251737] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.251737] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 10def566-2d1f-4ea2-9df5-ebf4d77f7b48/10def566-2d1f-4ea2-9df5-ebf4d77f7b48.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1499.251737] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f7a89bf-add4-47ce-a646-fce0d9a69eef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.256403] env[63297]: DEBUG oslo_vmware.api [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697674, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.264731] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Waiting for the task: (returnval){ [ 1499.264731] env[63297]: value = "task-1697675" [ 1499.264731] env[63297]: _type = "Task" [ 1499.264731] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.274631] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1697675, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.413194] env[63297]: DEBUG nova.scheduler.client.report [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1499.530575] env[63297]: DEBUG oslo_concurrency.lockutils [req-2cb463bf-8688-4f27-818b-226adaa1b832 req-12d63b69-e94d-4d7b-b4fd-d2bd7d11bf58 service nova] Releasing lock "refresh_cache-10def566-2d1f-4ea2-9df5-ebf4d77f7b48" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.755522] env[63297]: DEBUG oslo_vmware.api [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141315} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.755846] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1499.755981] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1499.756233] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1499.756390] env[63297]: INFO nova.compute.manager [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Took 2.28 seconds to destroy the instance on the hypervisor. [ 1499.756641] env[63297]: DEBUG oslo.service.loopingcall [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.756850] env[63297]: DEBUG nova.compute.manager [-] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1499.756943] env[63297]: DEBUG nova.network.neutron [-] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1499.778660] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1697675, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.925465] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.925984] env[63297]: DEBUG nova.compute.manager [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1499.928682] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.717s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.928861] env[63297]: DEBUG nova.objects.instance [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lazy-loading 'resources' on Instance uuid fba9040d-f904-44a1-8785-14d4696ea939 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1500.042468] env[63297]: DEBUG nova.network.neutron [req-6e526750-1eda-4c93-87d6-83c67f779cdc req-0a04535c-c015-4dad-8707-33adab77fd54 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updated VIF entry in instance network info cache for port 6e95dd8f-a3a6-4449-a572-aba4792afffe. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1500.042842] env[63297]: DEBUG nova.network.neutron [req-6e526750-1eda-4c93-87d6-83c67f779cdc req-0a04535c-c015-4dad-8707-33adab77fd54 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updating instance_info_cache with network_info: [{"id": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "address": "fa:16:3e:08:14:f1", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95dd8f-a3", "ovs_interfaceid": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.281021] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1697675, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.658566} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.281021] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 10def566-2d1f-4ea2-9df5-ebf4d77f7b48/10def566-2d1f-4ea2-9df5-ebf4d77f7b48.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1500.281021] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1500.281021] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d79c756f-aa44-4ea8-acd9-6eee1f8bdba9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.286402] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Waiting for the task: (returnval){ [ 1500.286402] env[63297]: value = "task-1697676" [ 1500.286402] env[63297]: _type = "Task" [ 1500.286402] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.296222] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1697676, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.347477] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "57d93827-2a5a-4f12-a74b-147a1a934dd1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.347786] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "57d93827-2a5a-4f12-a74b-147a1a934dd1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.431702] env[63297]: DEBUG nova.compute.utils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1500.434207] env[63297]: DEBUG nova.objects.instance [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lazy-loading 'numa_topology' on Instance uuid fba9040d-f904-44a1-8785-14d4696ea939 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1500.435490] env[63297]: DEBUG nova.compute.manager [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1500.435729] env[63297]: DEBUG nova.network.neutron [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1500.477378] env[63297]: DEBUG nova.policy [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e10106c41d7b493e8389f80ab08d648e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29c5c1c260474315a1a34b83a8054983', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1500.546547] env[63297]: DEBUG oslo_concurrency.lockutils [req-6e526750-1eda-4c93-87d6-83c67f779cdc req-0a04535c-c015-4dad-8707-33adab77fd54 service nova] Releasing lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.575283] env[63297]: DEBUG nova.network.neutron [-] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.792190] env[63297]: DEBUG nova.network.neutron [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Successfully created port: ca287ba8-2972-46d4-93dd-a3c4ce330623 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1500.800819] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1697676, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.850987] env[63297]: DEBUG nova.compute.manager [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1500.934905] env[63297]: DEBUG nova.compute.manager [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1500.941234] env[63297]: DEBUG nova.objects.base [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1501.079423] env[63297]: INFO nova.compute.manager [-] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Took 1.32 seconds to deallocate network for instance. [ 1501.225325] env[63297]: DEBUG nova.compute.manager [req-480a2f22-7ab9-475e-a3d3-a4e26a580811 req-eaba25a6-d0d3-4ae1-b43a-b3fd1b54e47d service nova] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Received event network-vif-deleted-1188b948-a801-47e0-a828-a6ac36c83619 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1501.299779] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1697676, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.849852} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.300117] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1501.300918] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb9e3ac-2658-4445-9489-df32819b4b1d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.333700] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 10def566-2d1f-4ea2-9df5-ebf4d77f7b48/10def566-2d1f-4ea2-9df5-ebf4d77f7b48.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1501.337658] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77164af7-8a7e-4e96-994e-67024befb8cc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.377165] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Waiting for the task: (returnval){ [ 1501.377165] env[63297]: value = "task-1697677" [ 1501.377165] env[63297]: _type = "Task" [ 1501.377165] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.382880] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.390299] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1697677, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.400295] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c331b7-4dcb-43b5-82e5-75eca9089587 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.407554] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156724e2-2248-4d75-a25d-2141668585bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.439310] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b1fad6-cfbd-4a45-a389-8692d604b63c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.451086] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633e4288-75ad-4950-b363-fb5db0274f78 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.465236] env[63297]: DEBUG nova.compute.provider_tree [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1501.631889] env[63297]: INFO nova.compute.manager [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Took 0.55 seconds to detach 1 volumes for instance. [ 1501.888019] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1697677, 'name': ReconfigVM_Task, 'duration_secs': 0.281158} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.888019] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 10def566-2d1f-4ea2-9df5-ebf4d77f7b48/10def566-2d1f-4ea2-9df5-ebf4d77f7b48.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1501.888553] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0b3af17-4225-4160-8b4e-af774ace3928 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.895816] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Waiting for the task: (returnval){ [ 1501.895816] env[63297]: value = "task-1697678" [ 1501.895816] env[63297]: _type = "Task" [ 1501.895816] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.904756] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1697678, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.957642] env[63297]: DEBUG nova.compute.manager [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1501.968773] env[63297]: DEBUG nova.scheduler.client.report [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1501.984339] env[63297]: DEBUG nova.virt.hardware [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1501.984591] env[63297]: DEBUG nova.virt.hardware [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1501.984745] env[63297]: DEBUG nova.virt.hardware [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1501.984920] env[63297]: DEBUG nova.virt.hardware [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1501.985076] env[63297]: DEBUG nova.virt.hardware [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1501.985226] env[63297]: DEBUG nova.virt.hardware [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1501.985433] env[63297]: DEBUG nova.virt.hardware [None 
req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1501.985591] env[63297]: DEBUG nova.virt.hardware [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1501.985754] env[63297]: DEBUG nova.virt.hardware [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1501.985951] env[63297]: DEBUG nova.virt.hardware [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1501.986101] env[63297]: DEBUG nova.virt.hardware [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1501.987010] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b456a1-28ef-443f-b489-f0a7c2a2520e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.990774] env[63297]: DEBUG oslo_concurrency.lockutils [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.991016] env[63297]: DEBUG oslo_concurrency.lockutils [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.991222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "b853b581-ea46-4455-8cdb-6ea2f31c22be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.991409] env[63297]: DEBUG oslo_concurrency.lockutils [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock 
"b853b581-ea46-4455-8cdb-6ea2f31c22be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.991575] env[63297]: DEBUG oslo_concurrency.lockutils [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.995965] env[63297]: INFO nova.compute.manager [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Terminating instance [ 1501.998282] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056df653-b8d2-45cb-8ec4-f47fbec7fe49 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.002802] env[63297]: DEBUG nova.compute.manager [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1502.003010] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1502.004068] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ef738f-985d-4aa8-8737-adb540b09658 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.020792] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1502.020885] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a047270-60c2-4534-8a86-b6865756df65 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.027041] env[63297]: DEBUG oslo_vmware.api [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1502.027041] env[63297]: value = "task-1697679" [ 1502.027041] env[63297]: _type = "Task" [ 1502.027041] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.034812] env[63297]: DEBUG oslo_vmware.api [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697679, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.137660] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.406911] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1697678, 'name': Rename_Task, 'duration_secs': 0.135814} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.407951] env[63297]: DEBUG nova.network.neutron [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Successfully updated port: ca287ba8-2972-46d4-93dd-a3c4ce330623 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1502.409277] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1502.409651] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30e24318-710c-428f-b51c-9cc572d68a97 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.416638] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Waiting for the task: (returnval){ [ 1502.416638] env[63297]: value = "task-1697680" [ 1502.416638] env[63297]: _type = "Task" [ 1502.416638] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.424774] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1697680, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.475020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.546s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.478524] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.076s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.480961] env[63297]: INFO nova.compute.claims [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1502.537542] env[63297]: DEBUG oslo_vmware.api [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697679, 'name': PowerOffVM_Task, 'duration_secs': 0.332055} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.537856] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1502.538063] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1502.538341] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8ac5213-b170-4cbe-b9d6-2f2fac16681c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.608874] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1502.609115] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1502.609297] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 
tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Deleting the datastore file [datastore1] b853b581-ea46-4455-8cdb-6ea2f31c22be {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1502.609615] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54c03072-f5df-456d-89bb-6c109f01cbe3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.617616] env[63297]: DEBUG oslo_vmware.api [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for the task: (returnval){ [ 1502.617616] env[63297]: value = "task-1697682" [ 1502.617616] env[63297]: _type = "Task" [ 1502.617616] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.626419] env[63297]: DEBUG oslo_vmware.api [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697682, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.915031] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "refresh_cache-5914b3ce-f40f-4782-b56a-9fc29c819938" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1502.915031] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired lock "refresh_cache-5914b3ce-f40f-4782-b56a-9fc29c819938" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.915031] env[63297]: DEBUG nova.network.neutron [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1502.926785] env[63297]: DEBUG oslo_vmware.api [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1697680, 'name': PowerOnVM_Task, 'duration_secs': 0.466595} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.927373] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1502.927373] env[63297]: INFO nova.compute.manager [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Took 7.90 seconds to spawn the instance on the hypervisor. [ 1502.927499] env[63297]: DEBUG nova.compute.manager [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1502.928182] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4284621-2fc6-4691-81fa-f0038bfe17d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.991422] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4c4ea535-8f42-40f4-bdfb-9f0c5b4502fc tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "fba9040d-f904-44a1-8785-14d4696ea939" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 54.327s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.993040] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "fba9040d-f904-44a1-8785-14d4696ea939" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 32.743s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.993040] env[63297]: INFO nova.compute.manager [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Unshelving [ 1503.127365] env[63297]: DEBUG oslo_vmware.api [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Task: {'id': task-1697682, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124066} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.127638] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1503.127818] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1503.127990] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1503.128211] env[63297]: INFO nova.compute.manager [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1503.128498] env[63297]: DEBUG oslo.service.loopingcall [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1503.128702] env[63297]: DEBUG nova.compute.manager [-] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1503.128797] env[63297]: DEBUG nova.network.neutron [-] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1503.260502] env[63297]: DEBUG nova.compute.manager [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Received event network-vif-plugged-ca287ba8-2972-46d4-93dd-a3c4ce330623 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1503.260721] env[63297]: DEBUG oslo_concurrency.lockutils [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] Acquiring lock "5914b3ce-f40f-4782-b56a-9fc29c819938-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.260999] env[63297]: DEBUG oslo_concurrency.lockutils [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] Lock "5914b3ce-f40f-4782-b56a-9fc29c819938-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.261222] env[63297]: DEBUG oslo_concurrency.lockutils [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] Lock "5914b3ce-f40f-4782-b56a-9fc29c819938-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.261345] env[63297]: DEBUG nova.compute.manager [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] No waiting events found dispatching network-vif-plugged-ca287ba8-2972-46d4-93dd-a3c4ce330623 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1503.261518] env[63297]: WARNING nova.compute.manager [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Received unexpected event network-vif-plugged-ca287ba8-2972-46d4-93dd-a3c4ce330623 for instance with vm_state building and task_state spawning. [ 1503.261652] env[63297]: DEBUG nova.compute.manager [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Received event network-changed-ca287ba8-2972-46d4-93dd-a3c4ce330623 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1503.261800] env[63297]: DEBUG nova.compute.manager [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Refreshing instance network info cache due to event network-changed-ca287ba8-2972-46d4-93dd-a3c4ce330623. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1503.261959] env[63297]: DEBUG oslo_concurrency.lockutils [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] Acquiring lock "refresh_cache-5914b3ce-f40f-4782-b56a-9fc29c819938" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.445905] env[63297]: DEBUG nova.network.neutron [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1503.447902] env[63297]: INFO nova.compute.manager [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Took 59.25 seconds to build instance. [ 1503.619507] env[63297]: DEBUG nova.network.neutron [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Updating instance_info_cache with network_info: [{"id": "ca287ba8-2972-46d4-93dd-a3c4ce330623", "address": "fa:16:3e:db:10:68", "network": {"id": "5f1806b3-2bca-4ef0-8011-77ce4207d8e4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-78228628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "29c5c1c260474315a1a34b83a8054983", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca287ba8-29", "ovs_interfaceid": "ca287ba8-2972-46d4-93dd-a3c4ce330623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.885812] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f63bab-1b30-4b69-ab70-28c5d08c60da {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.894376] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d7fcba-5d7a-4916-b59a-1d0ced555fa1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.924583] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a619495-858e-4d01-9868-31abb7f848ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.931913] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6b264b2a-7261-452d-8ab6-fdd72f741d5b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.945090] env[63297]: DEBUG nova.compute.provider_tree [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1503.950150] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7f389abd-1a8d-4203-8b4b-fe42647b0180 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.766s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.016994] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.069038] env[63297]: DEBUG nova.network.neutron [-] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.122612] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Releasing lock "refresh_cache-5914b3ce-f40f-4782-b56a-9fc29c819938" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.122930] env[63297]: DEBUG nova.compute.manager [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Instance network_info: |[{"id": "ca287ba8-2972-46d4-93dd-a3c4ce330623", "address": "fa:16:3e:db:10:68", "network": {"id": "5f1806b3-2bca-4ef0-8011-77ce4207d8e4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-78228628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "29c5c1c260474315a1a34b83a8054983", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca287ba8-29", "ovs_interfaceid": "ca287ba8-2972-46d4-93dd-a3c4ce330623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1504.123247] env[63297]: DEBUG oslo_concurrency.lockutils [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] Acquired lock "refresh_cache-5914b3ce-f40f-4782-b56a-9fc29c819938" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.123409] env[63297]: DEBUG nova.network.neutron [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Refreshing network info cache for port ca287ba8-2972-46d4-93dd-a3c4ce330623 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1504.124649] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:10:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0c0b05e-6d10-474c-9173-4c8f1dacac9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca287ba8-2972-46d4-93dd-a3c4ce330623', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1504.138820] env[63297]: DEBUG oslo.service.loopingcall [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1504.143559] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1504.143559] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b6efc5a-1df0-4f56-9639-ded5a7f0939d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.165096] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1504.165096] env[63297]: value = "task-1697683" [ 1504.165096] env[63297]: _type = "Task" [ 1504.165096] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.174131] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697683, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.437436] env[63297]: DEBUG nova.compute.manager [req-4f179a91-523f-493c-b6ac-380584e3e5be req-9fb6324e-9cd8-4760-bf88-bad45cb0b7cb service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Received event network-changed-bf9b1829-ba35-499e-993f-44fbd669974d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1504.437629] env[63297]: DEBUG nova.compute.manager [req-4f179a91-523f-493c-b6ac-380584e3e5be req-9fb6324e-9cd8-4760-bf88-bad45cb0b7cb service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Refreshing instance network info cache due to event network-changed-bf9b1829-ba35-499e-993f-44fbd669974d. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1504.437854] env[63297]: DEBUG oslo_concurrency.lockutils [req-4f179a91-523f-493c-b6ac-380584e3e5be req-9fb6324e-9cd8-4760-bf88-bad45cb0b7cb service nova] Acquiring lock "refresh_cache-10def566-2d1f-4ea2-9df5-ebf4d77f7b48" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.438011] env[63297]: DEBUG oslo_concurrency.lockutils [req-4f179a91-523f-493c-b6ac-380584e3e5be req-9fb6324e-9cd8-4760-bf88-bad45cb0b7cb service nova] Acquired lock "refresh_cache-10def566-2d1f-4ea2-9df5-ebf4d77f7b48" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.438850] env[63297]: DEBUG nova.network.neutron [req-4f179a91-523f-493c-b6ac-380584e3e5be req-9fb6324e-9cd8-4760-bf88-bad45cb0b7cb service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Refreshing network info cache for port bf9b1829-ba35-499e-993f-44fbd669974d {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1504.448909] env[63297]: DEBUG nova.scheduler.client.report [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1504.467037] env[63297]: DEBUG nova.network.neutron [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Updated VIF entry in instance network info cache for port ca287ba8-2972-46d4-93dd-a3c4ce330623. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1504.467444] env[63297]: DEBUG nova.network.neutron [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Updating instance_info_cache with network_info: [{"id": "ca287ba8-2972-46d4-93dd-a3c4ce330623", "address": "fa:16:3e:db:10:68", "network": {"id": "5f1806b3-2bca-4ef0-8011-77ce4207d8e4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-78228628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "29c5c1c260474315a1a34b83a8054983", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca287ba8-29", "ovs_interfaceid": "ca287ba8-2972-46d4-93dd-a3c4ce330623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.571831] env[63297]: INFO nova.compute.manager [-] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Took 1.44 seconds to deallocate network for instance. [ 1504.676099] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697683, 'name': CreateVM_Task, 'duration_secs': 0.346133} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.676320] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1504.676990] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.677169] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.677523] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1504.677761] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-050ab015-5534-4a6b-b612-a6c792842772 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.682613] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1504.682613] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529e1d7d-d9bc-ba4e-9396-98089b447f26" [ 1504.682613] env[63297]: _type = "Task" [ 1504.682613] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.690167] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529e1d7d-d9bc-ba4e-9396-98089b447f26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.954574] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.955086] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1504.957797] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.502s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.959433] env[63297]: INFO nova.compute.claims [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1504.969904] env[63297]: DEBUG oslo_concurrency.lockutils [req-dd2e1a8e-f1dc-4484-9f93-18a8f5092511 req-6abe2160-3c2a-4f7e-9a4c-620dbfbf7d7b service nova] Releasing lock "refresh_cache-5914b3ce-f40f-4782-b56a-9fc29c819938" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.079533] env[63297]: DEBUG oslo_concurrency.lockutils [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.179726] env[63297]: DEBUG nova.network.neutron [req-4f179a91-523f-493c-b6ac-380584e3e5be req-9fb6324e-9cd8-4760-bf88-bad45cb0b7cb service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Updated VIF entry in instance network info cache for port bf9b1829-ba35-499e-993f-44fbd669974d. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1505.180102] env[63297]: DEBUG nova.network.neutron [req-4f179a91-523f-493c-b6ac-380584e3e5be req-9fb6324e-9cd8-4760-bf88-bad45cb0b7cb service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Updating instance_info_cache with network_info: [{"id": "bf9b1829-ba35-499e-993f-44fbd669974d", "address": "fa:16:3e:e2:48:de", "network": {"id": "5b91ea3f-f213-4700-80d0-18d8bfab4cf1", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1365683862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d9f790a174e4b7ea1c5646c3b7be873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf9b1829-ba", "ovs_interfaceid": "bf9b1829-ba35-499e-993f-44fbd669974d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.193115] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529e1d7d-d9bc-ba4e-9396-98089b447f26, 'name': SearchDatastore_Task, 'duration_secs': 0.009931} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.194079] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.194313] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1505.194544] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1505.194689] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.194877] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1505.195369] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-656e646b-5801-4b00-bfd6-7374f049eb7e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.203382] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1505.203551] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1505.204251] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f64ecb8a-e1a7-41bc-bcd4-e977608793b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.209196] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1505.209196] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5232bc36-cc18-8f6f-5795-01036074e3bf" [ 1505.209196] env[63297]: _type = "Task" [ 1505.209196] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.216484] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5232bc36-cc18-8f6f-5795-01036074e3bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.285339] env[63297]: DEBUG nova.compute.manager [req-c7c3cda7-84ef-4c2a-a009-4c4dbe5371a9 req-c8b013ed-6e51-4427-98b3-77c654f5f792 service nova] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Received event network-vif-deleted-23a6c287-76ba-4a4d-9cfa-cd71492215be {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1505.464165] env[63297]: DEBUG nova.compute.utils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1505.470140] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Allocating IP information in the background. 
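The "Waiting for the task ... to complete" / "progress is 0%" pairs above come from repeatedly polling the vCenter task object until it finishes. The sketch below is a hypothetical stand-in for that polling loop; wait_for_task and get_task_info are made-up names, not the real oslo.vmware API.

import time

def wait_for_task(get_task_info, interval=0.5, timeout=60):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        time.sleep(interval)   # each retry is where a "progress is N%" line would come from
    raise TimeoutError("task did not complete in %ss" % timeout)

# Fake task that completes on the third poll.
_polls = iter([{"state": "running"}, {"state": "running"},
               {"state": "success", "duration_secs": 0.009931}])
print(wait_for_task(lambda: next(_polls)))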
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1505.470140] env[63297]: DEBUG nova.network.neutron [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1505.538340] env[63297]: DEBUG nova.policy [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eb119f2cb29446108035492d78b47b1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '790de76ab96a4e70a18619744dba096c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1505.683420] env[63297]: DEBUG oslo_concurrency.lockutils [req-4f179a91-523f-493c-b6ac-380584e3e5be req-9fb6324e-9cd8-4760-bf88-bad45cb0b7cb service nova] Releasing lock "refresh_cache-10def566-2d1f-4ea2-9df5-ebf4d77f7b48" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.720743] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5232bc36-cc18-8f6f-5795-01036074e3bf, 'name': SearchDatastore_Task, 'duration_secs': 0.00808} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.721500] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9991afa7-742a-4108-ac40-cb46eeb1b3f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.727106] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1505.727106] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52581328-2635-a275-c44a-ce17bd9d0d07" [ 1505.727106] env[63297]: _type = "Task" [ 1505.727106] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.735328] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52581328-2635-a275-c44a-ce17bd9d0d07, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.820865] env[63297]: DEBUG nova.network.neutron [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Successfully created port: 18133a60-2528-4b03-8ed3-ce5b7a4b6cae {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1505.970406] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1506.238503] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52581328-2635-a275-c44a-ce17bd9d0d07, 'name': SearchDatastore_Task, 'duration_secs': 0.009543} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.240744] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.240995] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5914b3ce-f40f-4782-b56a-9fc29c819938/5914b3ce-f40f-4782-b56a-9fc29c819938.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1506.241420] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0633aa9-b724-4da8-b7ed-18142d44e20a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.248792] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1506.248792] env[63297]: value = "task-1697684" [ 1506.248792] env[63297]: _type = "Task" [ 1506.248792] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.258736] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697684, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.338019] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0508389-e89e-4f00-8396-f7a6fd30f350 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.346600] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19f6971-dd75-43c4-ab32-57338fdb1f7e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.377015] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536cc022-6df5-4858-a42d-ff9925835bca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.385047] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5feedd78-57de-48c0-9e5c-6df40d45310a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.399796] env[63297]: DEBUG nova.compute.provider_tree [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1506.759313] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697684, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473409} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.759508] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5914b3ce-f40f-4782-b56a-9fc29c819938/5914b3ce-f40f-4782-b56a-9fc29c819938.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1506.759713] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1506.759949] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a55cc244-1888-4a1b-8458-1dbdec47a0dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.766267] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1506.766267] env[63297]: value = "task-1697685" [ 1506.766267] env[63297]: _type = "Task" [ 1506.766267] env[63297]: } to complete. 
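The "Extending root virtual disk to 1048576" entry follows directly from the flavor used in this run (m1.nano, root_gb=1): the target size is expressed in KB, and 1 GiB is 1,048,576 KB. A two-line check of that arithmetic:

# root_gb=1 from the flavor; the extend task takes the new capacity in KB (binary units).
def root_disk_size_kb(root_gb):
    return root_gb * 1024 * 1024   # GiB -> KB

assert root_disk_size_kb(1) == 1048576
print(root_disk_size_kb(1), "KB")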
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.773677] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697685, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.904217] env[63297]: DEBUG nova.scheduler.client.report [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1506.984467] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1507.011080] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1507.011353] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1507.011515] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1507.011694] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints 
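The inventory dictionary reported above is what placement uses to compute schedulable capacity; the usual reading is (total - reserved) * allocation_ratio per resource class. The sketch below applies that arithmetic to the exact numbers from this provider; the formula is stated as an assumption about placement's accounting, not quoted from its code.

# Inventory values copied from the report lines above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, "schedulable capacity:", capacity)
# VCPU -> 192.0, MEMORY_MB -> 196078.0, DISK_GB -> 400.0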
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1507.011837] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1507.011980] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1507.012209] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1507.012365] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1507.012531] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1507.012689] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1507.012859] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1507.013743] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca37ea8-eb8c-4065-b8a1-6fd92bbf05b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.021747] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be197cd7-52d1-4235-b8ac-9bc667485f6f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.280188] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697685, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062783} completed successfully. 
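The hardware.py lines above enumerate candidate (sockets, cores, threads) triples for a 1-vCPU flavor with no flavor or image limits and end up with the single topology 1:1:1. Below is a rough brute-force sketch of that search; the real search in nova.virt.hardware prunes the space differently, so this is only meant to reproduce the "Got 1 possible topologies" outcome for small vCPU counts.

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) whose product equals the vCPU count and which
    # stay within the limits (65536 each when neither flavor nor image constrains them).
    for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
        if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
            yield (s, c, t)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -> "Got 1 possible topologies"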
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.280188] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1507.280188] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c63308-099d-4aa8-b343-30792e4321a5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.303447] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 5914b3ce-f40f-4782-b56a-9fc29c819938/5914b3ce-f40f-4782-b56a-9fc29c819938.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1507.304760] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d40ad7c2-35f1-4915-9fb7-8ba40e495ad4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.320554] env[63297]: DEBUG nova.compute.manager [req-4ecc2db0-7063-42d5-9bee-3a4e66840332 req-01b59d6b-02f9-4796-b7f2-79b7660aff5f service nova] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Received event network-vif-plugged-18133a60-2528-4b03-8ed3-ce5b7a4b6cae {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1507.320803] env[63297]: DEBUG oslo_concurrency.lockutils [req-4ecc2db0-7063-42d5-9bee-3a4e66840332 req-01b59d6b-02f9-4796-b7f2-79b7660aff5f service nova] Acquiring lock "fc54a008-eb2e-4b10-86ea-be7c82b93139-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.321022] env[63297]: DEBUG oslo_concurrency.lockutils [req-4ecc2db0-7063-42d5-9bee-3a4e66840332 req-01b59d6b-02f9-4796-b7f2-79b7660aff5f service nova] Lock "fc54a008-eb2e-4b10-86ea-be7c82b93139-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.321192] env[63297]: DEBUG oslo_concurrency.lockutils [req-4ecc2db0-7063-42d5-9bee-3a4e66840332 req-01b59d6b-02f9-4796-b7f2-79b7660aff5f service nova] Lock "fc54a008-eb2e-4b10-86ea-be7c82b93139-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.321354] env[63297]: DEBUG nova.compute.manager [req-4ecc2db0-7063-42d5-9bee-3a4e66840332 req-01b59d6b-02f9-4796-b7f2-79b7660aff5f service nova] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] No waiting events found dispatching network-vif-plugged-18133a60-2528-4b03-8ed3-ce5b7a4b6cae {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1507.321516] env[63297]: WARNING nova.compute.manager 
[req-4ecc2db0-7063-42d5-9bee-3a4e66840332 req-01b59d6b-02f9-4796-b7f2-79b7660aff5f service nova] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Received unexpected event network-vif-plugged-18133a60-2528-4b03-8ed3-ce5b7a4b6cae for instance with vm_state building and task_state spawning. [ 1507.328073] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1507.328073] env[63297]: value = "task-1697686" [ 1507.328073] env[63297]: _type = "Task" [ 1507.328073] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.336169] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697686, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.388222] env[63297]: DEBUG nova.network.neutron [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Successfully updated port: 18133a60-2528-4b03-8ed3-ce5b7a4b6cae {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1507.409315] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.409967] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1507.412954] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.425s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.413069] env[63297]: DEBUG nova.objects.instance [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lazy-loading 'resources' on Instance uuid dc196e68-b725-43a1-9848-e84d1b138245 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1507.837545] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697686, 'name': ReconfigVM_Task, 'duration_secs': 0.268562} completed successfully. 
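The network-vif-plugged handling above (acquire the per-instance events lock, pop the waiter, warn about an unexpected event) is the external-event rendezvous between Neutron notifications and the compute manager. A toy model of that flow follows; the class and method names (InstanceEvents, prepare, pop) are hypothetical, not Nova's actual code.

import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # A caller registers the event it expects before triggering the action.
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop(self, instance_uuid, event_name):
        # When the notification arrives, wake the matching waiter or warn if none exists.
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print("WARNING: unexpected event", event_name, "for", instance_uuid)
        else:
            waiter.set()

events = InstanceEvents()
events.pop("fc54a008", "network-vif-plugged-18133a60")   # nobody waiting -> warning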
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.837831] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 5914b3ce-f40f-4782-b56a-9fc29c819938/5914b3ce-f40f-4782-b56a-9fc29c819938.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1507.838892] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ec73cc8-5c4a-45dc-8589-3ec194d4430e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.845389] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1507.845389] env[63297]: value = "task-1697687" [ 1507.845389] env[63297]: _type = "Task" [ 1507.845389] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.852692] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697687, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.889446] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "refresh_cache-fc54a008-eb2e-4b10-86ea-be7c82b93139" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1507.889570] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired lock "refresh_cache-fc54a008-eb2e-4b10-86ea-be7c82b93139" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.889735] env[63297]: DEBUG nova.network.neutron [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1507.915764] env[63297]: DEBUG nova.compute.utils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1507.919934] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1507.920161] env[63297]: DEBUG nova.network.neutron [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1507.977943] env[63297]: DEBUG nova.policy [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eb119f2cb29446108035492d78b47b1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '790de76ab96a4e70a18619744dba096c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1508.333632] env[63297]: DEBUG nova.network.neutron [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Successfully created port: 5a7d0713-7282-4609-a3ac-28c150824f43 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1508.355140] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697687, 'name': Rename_Task, 'duration_secs': 0.140393} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.355554] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1508.355889] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fcd9048f-9038-4d1f-9c2e-1c0ff0267a7a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.367453] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1508.367453] env[63297]: value = "task-1697688" [ 1508.367453] env[63297]: _type = "Task" [ 1508.367453] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.373125] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697688, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.389026] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df747ce5-0c5d-4c11-89cb-4a73454a7b90 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.393885] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd9aa10-95a2-4201-b3d4-69ae982783a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.428143] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1508.434020] env[63297]: DEBUG nova.network.neutron [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1508.434020] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d0cf83-b7de-4c93-a3c0-e7bff28b91d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.442124] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5284ef0a-27c4-4428-901a-bc8eb671b609 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.457960] env[63297]: DEBUG nova.compute.provider_tree [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1508.635802] env[63297]: DEBUG nova.network.neutron [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Updating instance_info_cache with network_info: [{"id": "18133a60-2528-4b03-8ed3-ce5b7a4b6cae", "address": "fa:16:3e:f8:81:19", "network": {"id": "c64071c6-1f68-4a0f-bbee-4e5b755ec361", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1843002314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "790de76ab96a4e70a18619744dba096c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap18133a60-25", "ovs_interfaceid": "18133a60-2528-4b03-8ed3-ce5b7a4b6cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.875181] env[63297]: DEBUG oslo_vmware.api [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697688, 'name': PowerOnVM_Task, 'duration_secs': 0.427117} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.875516] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1508.875683] env[63297]: INFO nova.compute.manager [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Took 6.92 seconds to spawn the instance on the hypervisor. [ 1508.875861] env[63297]: DEBUG nova.compute.manager [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1508.876666] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5820c90-06a5-421d-ad62-a6fbf9078815 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.962790] env[63297]: DEBUG nova.scheduler.client.report [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1509.138051] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Releasing lock "refresh_cache-fc54a008-eb2e-4b10-86ea-be7c82b93139" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1509.138409] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Instance network_info: |[{"id": "18133a60-2528-4b03-8ed3-ce5b7a4b6cae", "address": "fa:16:3e:f8:81:19", "network": {"id": "c64071c6-1f68-4a0f-bbee-4e5b755ec361", 
"bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1843002314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "790de76ab96a4e70a18619744dba096c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18133a60-25", "ovs_interfaceid": "18133a60-2528-4b03-8ed3-ce5b7a4b6cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1509.138916] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:81:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18133a60-2528-4b03-8ed3-ce5b7a4b6cae', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1509.146937] env[63297]: DEBUG oslo.service.loopingcall [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1509.147192] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1509.147459] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87b2cc3b-8969-4fd6-b59b-e40e488e2635 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.167994] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1509.167994] env[63297]: value = "task-1697689" [ 1509.167994] env[63297]: _type = "Task" [ 1509.167994] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.175923] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697689, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.337252] env[63297]: DEBUG nova.compute.manager [req-4fad719a-ad37-44d7-bc60-92c2993b0658 req-994e6182-996a-49bb-b4a9-43180b411ae6 service nova] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Received event network-changed-18133a60-2528-4b03-8ed3-ce5b7a4b6cae {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1509.337512] env[63297]: DEBUG nova.compute.manager [req-4fad719a-ad37-44d7-bc60-92c2993b0658 req-994e6182-996a-49bb-b4a9-43180b411ae6 service nova] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Refreshing instance network info cache due to event network-changed-18133a60-2528-4b03-8ed3-ce5b7a4b6cae. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1509.337766] env[63297]: DEBUG oslo_concurrency.lockutils [req-4fad719a-ad37-44d7-bc60-92c2993b0658 req-994e6182-996a-49bb-b4a9-43180b411ae6 service nova] Acquiring lock "refresh_cache-fc54a008-eb2e-4b10-86ea-be7c82b93139" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.337918] env[63297]: DEBUG oslo_concurrency.lockutils [req-4fad719a-ad37-44d7-bc60-92c2993b0658 req-994e6182-996a-49bb-b4a9-43180b411ae6 service nova] Acquired lock "refresh_cache-fc54a008-eb2e-4b10-86ea-be7c82b93139" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.338095] env[63297]: DEBUG nova.network.neutron [req-4fad719a-ad37-44d7-bc60-92c2993b0658 req-994e6182-996a-49bb-b4a9-43180b411ae6 service nova] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Refreshing network info cache for port 18133a60-2528-4b03-8ed3-ce5b7a4b6cae {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1509.394040] env[63297]: INFO nova.compute.manager [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Took 46.48 seconds to build instance. [ 1509.443550] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1509.470047] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1509.470298] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1509.470456] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1509.470634] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1509.470778] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1509.470962] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1509.471210] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1509.471372] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1509.471539] env[63297]: DEBUG nova.virt.hardware [None 
req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1509.471697] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1509.471867] env[63297]: DEBUG nova.virt.hardware [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1509.472630] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.060s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.475366] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f47e2b-6295-4ef2-b8fd-c344046f4d6c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.478340] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 38.201s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.485931] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4368a70e-6f48-47e2-922f-1c7cda6fa9db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.502276] env[63297]: INFO nova.scheduler.client.report [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Deleted allocations for instance dc196e68-b725-43a1-9848-e84d1b138245 [ 1509.677916] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697689, 'name': CreateVM_Task} progress is 25%. 
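The lockutils entries above record both how long a caller waited for "compute_resources" (38.201s) and how long it was then held (2.060s). Here is a small context-manager sketch of that waited/held accounting around a plain threading.Lock; timed_lock is an illustrative name, not the oslo.concurrency API.

import threading
import time
from contextlib import contextmanager

@contextmanager
def timed_lock(lock, name):
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print('Lock "%s" acquired :: waited %.3fs' % (name, acquired - start))
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, time.monotonic() - acquired))

compute_resources = threading.Lock()
with timed_lock(compute_resources, "compute_resources"):
    time.sleep(0.01)   # stand-in for ResourceTracker.update_usage work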
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.897663] env[63297]: DEBUG oslo_concurrency.lockutils [None req-672612db-03d2-470a-b44b-c88ec4891672 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "5914b3ce-f40f-4782-b56a-9fc29c819938" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.991s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.010319] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8bbbf7e7-41a2-446a-92ad-b6379db3afd8 tempest-ServerShowV247Test-1477613391 tempest-ServerShowV247Test-1477613391-project-member] Lock "dc196e68-b725-43a1-9848-e84d1b138245" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.824s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.015583] env[63297]: DEBUG nova.network.neutron [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Successfully updated port: 5a7d0713-7282-4609-a3ac-28c150824f43 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1510.133063] env[63297]: DEBUG nova.network.neutron [req-4fad719a-ad37-44d7-bc60-92c2993b0658 req-994e6182-996a-49bb-b4a9-43180b411ae6 service nova] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Updated VIF entry in instance network info cache for port 18133a60-2528-4b03-8ed3-ce5b7a4b6cae. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1510.133063] env[63297]: DEBUG nova.network.neutron [req-4fad719a-ad37-44d7-bc60-92c2993b0658 req-994e6182-996a-49bb-b4a9-43180b411ae6 service nova] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Updating instance_info_cache with network_info: [{"id": "18133a60-2528-4b03-8ed3-ce5b7a4b6cae", "address": "fa:16:3e:f8:81:19", "network": {"id": "c64071c6-1f68-4a0f-bbee-4e5b755ec361", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1843002314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "790de76ab96a4e70a18619744dba096c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18133a60-25", "ovs_interfaceid": "18133a60-2528-4b03-8ed3-ce5b7a4b6cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.181022] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697689, 'name': CreateVM_Task, 'duration_secs': 0.690313} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.181234] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1510.181888] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.182186] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.182401] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1510.182734] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f113158e-2d99-4316-9ab5-5ce7ba76f8b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.188343] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1510.188343] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5209485d-9d5b-53ae-e1fd-bb9ba480f4d5" [ 1510.188343] env[63297]: _type = "Task" [ 1510.188343] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.197815] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5209485d-9d5b-53ae-e1fd-bb9ba480f4d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.520050] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "refresh_cache-4701073f-eeee-4f37-919a-4c53663ac15f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.520376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired lock "refresh_cache-4701073f-eeee-4f37-919a-4c53663ac15f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.520376] env[63297]: DEBUG nova.network.neutron [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1510.532170] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b14e8466-68ab-4705-a439-6db961a149b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.532327] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 14b4518e-044a-451a-845d-fa3742e5b3e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.532543] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 9b1306f9-4b0a-4116-8e79-271478f33490 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.532702] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b853b581-ea46-4455-8cdb-6ea2f31c22be is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1510.532822] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.532951] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 8fa5fef6-8768-4e24-aab3-db56a10588c2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1510.533244] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 5124f7fb-1293-4964-98c4-426ecfce7d10 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1510.533244] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 765f3232-f3f9-4d9b-92f2-fb6603f2a90a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.533389] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b1ed5d76-d358-49d3-a854-8f968bc987ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.533446] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b95b7656-70ac-4eaf-9934-4b4c50e78035 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1510.534274] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b261c90f-642d-42b7-8b79-d87eeaf0537a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.534274] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.534274] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 89c9cd40-585e-4ae6-88b3-1a33a94c3b52 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.534274] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 96265295-6b0c-4803-bb89-6166c9d3fc7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.534274] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 71faf167-dfe3-4792-9841-b5ab4b333884 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1510.534274] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance efaa465d-f6b2-4891-8e96-b4c3af052759 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1510.534274] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance ac112251-8cc3-4f57-8983-8a07e2a068f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.534274] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 66b7a1e5-5e74-49db-99f3-4427d7297bf2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.534657] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 10def566-2d1f-4ea2-9df5-ebf4d77f7b48 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.534657] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 5914b3ce-f40f-4782-b56a-9fc29c819938 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.534657] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance fc54a008-eb2e-4b10-86ea-be7c82b93139 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.534741] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 4701073f-eeee-4f37-919a-4c53663ac15f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1510.634705] env[63297]: DEBUG oslo_concurrency.lockutils [req-4fad719a-ad37-44d7-bc60-92c2993b0658 req-994e6182-996a-49bb-b4a9-43180b411ae6 service nova] Releasing lock "refresh_cache-fc54a008-eb2e-4b10-86ea-be7c82b93139" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.682487] env[63297]: INFO nova.compute.manager [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Rescuing [ 1510.682950] env[63297]: DEBUG oslo_concurrency.lockutils [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "refresh_cache-5914b3ce-f40f-4782-b56a-9fc29c819938" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.683129] env[63297]: DEBUG oslo_concurrency.lockutils [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired lock "refresh_cache-5914b3ce-f40f-4782-b56a-9fc29c819938" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.683306] env[63297]: DEBUG nova.network.neutron [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1510.700119] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5209485d-9d5b-53ae-e1fd-bb9ba480f4d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009885} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.701017] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.701017] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1510.701017] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.701157] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.701331] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1510.701831] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7569730-adcd-4dd5-89b2-cb8f745051a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.711739] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1510.711952] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1510.712784] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a828f49-1037-461b-98d4-060b41796e54 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.719214] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1510.719214] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]523ee675-31fc-58d0-7cb6-4048c7a4fede" [ 1510.719214] env[63297]: _type = "Task" [ 1510.719214] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.727759] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523ee675-31fc-58d0-7cb6-4048c7a4fede, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.038905] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance fba9040d-f904-44a1-8785-14d4696ea939 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1511.052145] env[63297]: DEBUG nova.network.neutron [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1511.194139] env[63297]: DEBUG nova.network.neutron [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Updating instance_info_cache with network_info: [{"id": "5a7d0713-7282-4609-a3ac-28c150824f43", "address": "fa:16:3e:1f:ef:ec", "network": {"id": "c64071c6-1f68-4a0f-bbee-4e5b755ec361", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1843002314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "790de76ab96a4e70a18619744dba096c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a7d0713-72", "ovs_interfaceid": "5a7d0713-7282-4609-a3ac-28c150824f43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.232202] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523ee675-31fc-58d0-7cb6-4048c7a4fede, 'name': SearchDatastore_Task, 'duration_secs': 0.008253} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.235133] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db42ecf7-9acd-4fbc-8a0d-3397ac645a07 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.240999] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1511.240999] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5297c548-3f33-505b-34e0-a8ae113cf15e" [ 1511.240999] env[63297]: _type = "Task" [ 1511.240999] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.249127] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5297c548-3f33-505b-34e0-a8ae113cf15e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.396715] env[63297]: DEBUG nova.compute.manager [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Received event network-vif-plugged-5a7d0713-7282-4609-a3ac-28c150824f43 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1511.396935] env[63297]: DEBUG oslo_concurrency.lockutils [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] Acquiring lock "4701073f-eeee-4f37-919a-4c53663ac15f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.397155] env[63297]: DEBUG oslo_concurrency.lockutils [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] Lock "4701073f-eeee-4f37-919a-4c53663ac15f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.397323] env[63297]: DEBUG oslo_concurrency.lockutils [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] Lock "4701073f-eeee-4f37-919a-4c53663ac15f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.397535] env[63297]: DEBUG nova.compute.manager [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] No waiting events found dispatching network-vif-plugged-5a7d0713-7282-4609-a3ac-28c150824f43 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1511.397717] env[63297]: WARNING nova.compute.manager [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Received unexpected event network-vif-plugged-5a7d0713-7282-4609-a3ac-28c150824f43 for instance with vm_state building and task_state spawning. [ 1511.397876] env[63297]: DEBUG nova.compute.manager [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Received event network-changed-5a7d0713-7282-4609-a3ac-28c150824f43 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1511.398339] env[63297]: DEBUG nova.compute.manager [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Refreshing instance network info cache due to event network-changed-5a7d0713-7282-4609-a3ac-28c150824f43. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1511.398579] env[63297]: DEBUG oslo_concurrency.lockutils [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] Acquiring lock "refresh_cache-4701073f-eeee-4f37-919a-4c53663ac15f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.453976] env[63297]: DEBUG nova.network.neutron [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Updating instance_info_cache with network_info: [{"id": "ca287ba8-2972-46d4-93dd-a3c4ce330623", "address": "fa:16:3e:db:10:68", "network": {"id": "5f1806b3-2bca-4ef0-8011-77ce4207d8e4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-78228628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "29c5c1c260474315a1a34b83a8054983", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca287ba8-29", "ovs_interfaceid": "ca287ba8-2972-46d4-93dd-a3c4ce330623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.541767] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 42d872d6-da12-474b-8741-1d991d507cfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1511.696682] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Releasing lock "refresh_cache-4701073f-eeee-4f37-919a-4c53663ac15f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.697093] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Instance network_info: |[{"id": "5a7d0713-7282-4609-a3ac-28c150824f43", "address": "fa:16:3e:1f:ef:ec", "network": {"id": "c64071c6-1f68-4a0f-bbee-4e5b755ec361", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1843002314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "790de76ab96a4e70a18619744dba096c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a7d0713-72", "ovs_interfaceid": "5a7d0713-7282-4609-a3ac-28c150824f43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1511.697459] env[63297]: DEBUG oslo_concurrency.lockutils [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] Acquired lock "refresh_cache-4701073f-eeee-4f37-919a-4c53663ac15f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.697640] env[63297]: DEBUG nova.network.neutron [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Refreshing network info cache for port 5a7d0713-7282-4609-a3ac-28c150824f43 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1511.698963] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:ef:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a7d0713-7282-4609-a3ac-28c150824f43', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1511.707000] env[63297]: DEBUG oslo.service.loopingcall [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 
tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1511.707993] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1511.708233] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59076dc0-c517-4dd3-bae3-7c0bbd23da67 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.730717] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1511.730717] env[63297]: value = "task-1697690" [ 1511.730717] env[63297]: _type = "Task" [ 1511.730717] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.738490] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697690, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.749991] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5297c548-3f33-505b-34e0-a8ae113cf15e, 'name': SearchDatastore_Task, 'duration_secs': 0.025807} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.750266] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.750547] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] fc54a008-eb2e-4b10-86ea-be7c82b93139/fc54a008-eb2e-4b10-86ea-be7c82b93139.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1511.750803] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-569b6dd0-8adb-46d9-a25f-1d04452252f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.756949] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1511.756949] env[63297]: value = "task-1697691" [ 1511.756949] env[63297]: _type = "Task" [ 1511.756949] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.765441] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697691, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.960581] env[63297]: DEBUG oslo_concurrency.lockutils [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Releasing lock "refresh_cache-5914b3ce-f40f-4782-b56a-9fc29c819938" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.045300] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1512.241711] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697690, 'name': CreateVM_Task, 'duration_secs': 0.46855} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.244148] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1512.244888] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.245062] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.245397] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1512.245994] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32e5af1c-50fe-41f3-83f6-150d2d80143b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.251130] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 
1512.251130] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5298e89f-6cb0-bbe9-80a2-bd1be9896574" [ 1512.251130] env[63297]: _type = "Task" [ 1512.251130] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.259583] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5298e89f-6cb0-bbe9-80a2-bd1be9896574, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.269152] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697691, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.458394] env[63297]: DEBUG nova.network.neutron [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Updated VIF entry in instance network info cache for port 5a7d0713-7282-4609-a3ac-28c150824f43. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1512.458901] env[63297]: DEBUG nova.network.neutron [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Updating instance_info_cache with network_info: [{"id": "5a7d0713-7282-4609-a3ac-28c150824f43", "address": "fa:16:3e:1f:ef:ec", "network": {"id": "c64071c6-1f68-4a0f-bbee-4e5b755ec361", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1843002314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "790de76ab96a4e70a18619744dba096c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a7d0713-72", "ovs_interfaceid": "5a7d0713-7282-4609-a3ac-28c150824f43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.496351] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1512.496549] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-495af559-3be5-4c15-9e05-d8055b38e998 {{(pid=63297) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.505612] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1512.505612] env[63297]: value = "task-1697692" [ 1512.505612] env[63297]: _type = "Task" [ 1512.505612] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.515027] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697692, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.549596] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 44f4776e-d4a1-40ad-a03b-bb03582b95bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1512.761715] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5298e89f-6cb0-bbe9-80a2-bd1be9896574, 'name': SearchDatastore_Task, 'duration_secs': 0.060654} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.764765] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.765011] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1512.765246] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.765391] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.765561] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1512.765819] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-771c92b3-470a-4ff2-b436-d13fcb3e845d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.772827] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697691, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63274} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.773870] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] fc54a008-eb2e-4b10-86ea-be7c82b93139/fc54a008-eb2e-4b10-86ea-be7c82b93139.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1512.774093] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1512.774346] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1512.774505] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1512.775273] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b99c452c-d3e8-4d90-bef4-566dcc77bfda {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.776961] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a465f81b-e5e7-4481-a012-d483d8ade4c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.781722] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1512.781722] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]523f49df-3cb4-00b2-1a7b-1df1a5188c68" [ 1512.781722] env[63297]: _type = "Task" [ 1512.781722] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.785586] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1512.785586] env[63297]: value = "task-1697693" [ 1512.785586] env[63297]: _type = "Task" [ 1512.785586] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.791894] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523f49df-3cb4-00b2-1a7b-1df1a5188c68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.796365] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697693, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.962364] env[63297]: DEBUG oslo_concurrency.lockutils [req-bbe2edd9-61a4-4b51-9a38-552061b544c3 req-abf1df1b-c926-4872-b3c5-01c808fe1962 service nova] Releasing lock "refresh_cache-4701073f-eeee-4f37-919a-4c53663ac15f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1513.018694] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697692, 'name': PowerOffVM_Task, 'duration_secs': 0.229298} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.018952] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1513.019728] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16113d6a-3c97-4007-8593-23dd98b297de {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.037697] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc30873-926a-4ce5-95af-2670c505f052 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.052536] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 5a868570-7504-4262-80b2-a458c219e689 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.063418] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1513.063680] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b79e8dc-d561-4813-b855-e2945636612d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.070895] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1513.070895] env[63297]: value = "task-1697694" [ 1513.070895] env[63297]: _type = "Task" [ 1513.070895] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.078306] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697694, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.295402] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523f49df-3cb4-00b2-1a7b-1df1a5188c68, 'name': SearchDatastore_Task, 'duration_secs': 0.008441} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.298609] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697693, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06408} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.298849] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f40fdd22-91a5-4afe-9776-6d289cdd3040 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.300824] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1513.301519] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767fbb14-7456-4490-a3b1-a894f66da16a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.325120] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] fc54a008-eb2e-4b10-86ea-be7c82b93139/fc54a008-eb2e-4b10-86ea-be7c82b93139.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1513.326346] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a07fb567-0a94-4f84-b1fa-df19ddaf23e6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.340115] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1513.340115] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52729d8f-a96f-81e4-8f94-2bbbf07fa2c9" [ 1513.340115] env[63297]: _type = "Task" [ 1513.340115] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.345714] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1513.345714] env[63297]: value = "task-1697695" [ 1513.345714] env[63297]: _type = "Task" [ 1513.345714] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.349087] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52729d8f-a96f-81e4-8f94-2bbbf07fa2c9, 'name': SearchDatastore_Task, 'duration_secs': 0.009541} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.351906] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1513.352220] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 4701073f-eeee-4f37-919a-4c53663ac15f/4701073f-eeee-4f37-919a-4c53663ac15f.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1513.352463] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16fed483-bba8-477a-b7b6-1588b70402ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.358991] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697695, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.360110] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1513.360110] env[63297]: value = "task-1697696" [ 1513.360110] env[63297]: _type = "Task" [ 1513.360110] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.367495] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697696, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.555287] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 5cdb44c7-3dc1-4bce-8864-a1a40150e730 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.581391] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1513.581697] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1513.581996] env[63297]: DEBUG oslo_concurrency.lockutils [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1513.582124] env[63297]: DEBUG oslo_concurrency.lockutils [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1513.582305] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1513.582569] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99f8e862-6b89-4fbf-bcc0-b50bb07e3c89 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.595606] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1513.595804] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1513.596674] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3eded5f1-59c9-4f62-bedf-7f539b333274 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.602639] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1513.602639] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bff492-1fee-a77f-e3bd-615e6f562790" [ 1513.602639] env[63297]: _type = "Task" [ 1513.602639] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.611424] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bff492-1fee-a77f-e3bd-615e6f562790, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.858319] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697695, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.870078] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697696, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490133} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.870453] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 4701073f-eeee-4f37-919a-4c53663ac15f/4701073f-eeee-4f37-919a-4c53663ac15f.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1513.870737] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1513.870999] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9074ce4-2605-42df-abbf-798a71f9ede9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.877436] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1513.877436] env[63297]: value = "task-1697697" [ 1513.877436] env[63297]: _type = "Task" [ 1513.877436] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.884549] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697697, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.058758] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 57d93827-2a5a-4f12-a74b-147a1a934dd1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1514.059180] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1514.059366] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1514.114585] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bff492-1fee-a77f-e3bd-615e6f562790, 'name': SearchDatastore_Task, 'duration_secs': 0.065422} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.115408] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-133e6513-48cd-43b5-90bf-933278ed08b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.120307] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1514.120307] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c4ec33-1a75-4878-3870-fe71b0fa9a51" [ 1514.120307] env[63297]: _type = "Task" [ 1514.120307] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.127606] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c4ec33-1a75-4878-3870-fe71b0fa9a51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.360571] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697695, 'name': ReconfigVM_Task, 'duration_secs': 0.560684} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.362699] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Reconfigured VM instance instance-0000004a to attach disk [datastore1] fc54a008-eb2e-4b10-86ea-be7c82b93139/fc54a008-eb2e-4b10-86ea-be7c82b93139.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1514.363458] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11e78a18-d8fa-4f39-bed3-77cfc0aca00b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.369904] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1514.369904] env[63297]: value = "task-1697698" [ 1514.369904] env[63297]: _type = "Task" [ 1514.369904] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.380239] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697698, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.390209] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065491} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.390461] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1514.391216] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad8620e-9d72-4919-902a-c6c3d1ecfd4f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.414978] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 4701073f-eeee-4f37-919a-4c53663ac15f/4701073f-eeee-4f37-919a-4c53663ac15f.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1514.417541] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-388b5724-3287-49d7-b0b3-4e42553f69f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.440167] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1514.440167] env[63297]: value = "task-1697699" [ 1514.440167] env[63297]: _type = "Task" [ 1514.440167] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.456466] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697699, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.478141] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee917d6-8505-4843-9a56-85a9638833a1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.485177] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8d0e79-14e7-4b2e-9f19-638f5de354dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.514683] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0741156-10d9-4a9a-8033-a75c4bad67f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.521953] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4368a9-0ce3-40d3-87b4-97c9a02f8cb7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.534805] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1514.632519] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c4ec33-1a75-4878-3870-fe71b0fa9a51, 'name': SearchDatastore_Task, 'duration_secs': 0.04414} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.632817] env[63297]: DEBUG oslo_concurrency.lockutils [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.633027] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5914b3ce-f40f-4782-b56a-9fc29c819938/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk. 
{{(pid=63297) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1514.633278] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3633e7f3-20ff-4688-b95d-0e7d9683ecde {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.640034] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1514.640034] env[63297]: value = "task-1697700" [ 1514.640034] env[63297]: _type = "Task" [ 1514.640034] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.647485] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697700, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.881266] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697698, 'name': Rename_Task, 'duration_secs': 0.132957} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.881618] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1514.881883] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cefe1eb0-a3ac-4b4e-ba1d-815afaed2ced {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.888738] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1514.888738] env[63297]: value = "task-1697701" [ 1514.888738] env[63297]: _type = "Task" [ 1514.888738] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.896895] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697701, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.952169] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697699, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.039170] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1515.150075] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697700, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471681} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.150340] env[63297]: INFO nova.virt.vmwareapi.ds_util [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5914b3ce-f40f-4782-b56a-9fc29c819938/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk. [ 1515.151092] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b48a81a-2cf8-4bca-a01e-a9f12c97ee83 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.176198] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 5914b3ce-f40f-4782-b56a-9fc29c819938/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1515.176446] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7000caf2-91f0-4b0c-81c4-232f565ae2f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.195018] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1515.195018] env[63297]: value = "task-1697702" [ 1515.195018] env[63297]: _type = "Task" [ 1515.195018] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.202981] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697702, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.399377] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697701, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.452920] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697699, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.546183] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1515.546183] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.068s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.546445] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.246s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.548350] env[63297]: INFO nova.compute.claims [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1515.550767] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1515.551139] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Cleaning up deleted instances {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1515.707020] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697702, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.899902] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697701, 'name': PowerOnVM_Task, 'duration_secs': 0.549946} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.900185] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1515.900384] env[63297]: INFO nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Took 8.92 seconds to spawn the instance on the hypervisor. [ 1515.900562] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1515.901350] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aeb8687-9fac-4ba8-ae04-92295b1cdc83 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.952897] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697699, 'name': ReconfigVM_Task, 'duration_secs': 1.338538} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.953186] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 4701073f-eeee-4f37-919a-4c53663ac15f/4701073f-eeee-4f37-919a-4c53663ac15f.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1515.953776] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1219c5c6-f8e0-48d0-8ef7-352344dce5e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.960071] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1515.960071] env[63297]: value = "task-1697703" [ 1515.960071] env[63297]: _type = "Task" [ 1515.960071] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.968748] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697703, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.067046] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] There are 52 instances to clean {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1516.067156] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: c05a249e-ab88-41f0-81f5-b644b3da5d2d] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1516.207497] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697702, 'name': ReconfigVM_Task, 'duration_secs': 0.582614} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.207803] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 5914b3ce-f40f-4782-b56a-9fc29c819938/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1516.208645] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3541ca-d58e-495b-bab0-5d7a3eb5a573 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.237824] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c28c8ebf-020a-44f8-b91c-47533e4e134e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.253409] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1516.253409] env[63297]: value = "task-1697704" [ 1516.253409] env[63297]: _type = "Task" [ 1516.253409] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.263102] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697704, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.409933] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781540f3-8641-4bc4-b287-a2dc5828b5d8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.421193] env[63297]: INFO nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Took 46.04 seconds to build instance. 
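The records above repeat one pattern: a vSphere task is created (ReconfigVM_Task, ExtendVirtualDisk_Task, Rename_Task, PowerOnVM_Task), a "Waiting for the task ... to complete" record is logged, and oslo.vmware then polls the task and prints "progress is N%" until it finishes. A minimal illustrative sketch of that pattern, assuming an already constructed oslo.vmware VMwareAPISession as used by nova.virt.vmwareapi; the names `session`, `vm_ref` and `power_on_and_wait` are placeholders, not taken from this log:

    from oslo_vmware import exceptions as vexc

    def power_on_and_wait(session, vm_ref):
        # invoke_api() issues the SOAP call, seen in the log as
        # "Invoking VirtualMachine.PowerOnVM_Task ...", and returns a task ref.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        try:
            # wait_for_task() polls the task state and emits the
            # "Task: {...} progress is N%" DEBUG lines until completion.
            return session.wait_for_task(task)
        except vexc.VimException:
            # The task ended in an error state; let the caller handle it.
            raise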
[ 1516.422779] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387755ad-b119-42de-a0ec-87d6363b3bee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.455391] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f109e3f-4332-42f6-b091-a21ca90c8eeb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.465676] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e27dce-bc9d-4f1a-8fed-5c2b0ffc6aa8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.474418] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697703, 'name': Rename_Task, 'duration_secs': 0.142405} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.482015] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1516.483027] env[63297]: DEBUG nova.compute.provider_tree [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1516.483841] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9478e91b-822a-48a4-8343-6dfe0e7f53f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.489844] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1516.489844] env[63297]: value = "task-1697705" [ 1516.489844] env[63297]: _type = "Task" [ 1516.489844] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.498475] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697705, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.574150] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 708d1907-1619-4aa4-b0b3-ae58f046a760] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1516.763237] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697704, 'name': ReconfigVM_Task, 'duration_secs': 0.452696} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.763579] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1516.763820] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef2b1239-9a81-4ade-9354-b320663d403d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.769350] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1516.769350] env[63297]: value = "task-1697706" [ 1516.769350] env[63297]: _type = "Task" [ 1516.769350] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.778462] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697706, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.927179] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "fc54a008-eb2e-4b10-86ea-be7c82b93139" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.556s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.986867] env[63297]: DEBUG nova.scheduler.client.report [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1516.999784] env[63297]: DEBUG oslo_vmware.api [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697705, 'name': PowerOnVM_Task, 'duration_secs': 0.43286} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.000142] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1517.000404] env[63297]: INFO nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Took 7.56 seconds to spawn the instance on the hypervisor. 
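The "Acquiring lock ... by ...", "Lock ... acquired by ... waited N s" and "released by ... held N s" records above (for "compute_resources" and the per-instance build locks) are emitted by oslo.concurrency's lockutils wrapper. A short sketch of the usage pattern that produces such records; the function name below is a placeholder, not Nova's actual code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(tracker, instance):
        # Runs with the "compute_resources" lock held; the lockutils wrapper
        # logs how long the caller waited for the lock and how long it held it.
        ...

    # lockutils also offers a context-manager form for ad-hoc critical sections:
    # with lockutils.lock('compute_resources'):
    #     ...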
[ 1517.000615] env[63297]: DEBUG nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1517.001981] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f32fc5-e5aa-4494-b247-afc813e7e65b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.077181] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: e7fae121-174f-4955-a185-b3f92c6ab110] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1517.282911] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697706, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.493780] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.947s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.494081] env[63297]: DEBUG nova.compute.manager [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1517.497128] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.884s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.498393] env[63297]: INFO nova.compute.claims [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1517.518656] env[63297]: INFO nova.compute.manager [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Took 47.08 seconds to build instance. 
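The "Inventory has not changed for provider ..." record above carries the inventory the resource tracker reports to placement. As a quick sanity check of those numbers: placement treats usable capacity as (total - reserved) * allocation_ratio, so the snippet below simply recomputes that from the values shown in the log (the formula is the standard placement capacity calculation, not something stated in this log):

    # Inventory data copied from the record above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0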
[ 1517.581120] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: d2436717-7230-448f-b310-d062b1f11c52] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1517.781199] env[63297]: DEBUG oslo_vmware.api [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697706, 'name': PowerOnVM_Task, 'duration_secs': 0.54397} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.781199] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1517.782889] env[63297]: DEBUG nova.compute.manager [None req-50cf79dc-f6a8-4413-a0fd-59353226c53a tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1517.783623] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b974d70-e2e3-4049-8555-6bf068e837dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.928841] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "fc54a008-eb2e-4b10-86ea-be7c82b93139" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.928841] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "fc54a008-eb2e-4b10-86ea-be7c82b93139" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.928841] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "fc54a008-eb2e-4b10-86ea-be7c82b93139-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.928841] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "fc54a008-eb2e-4b10-86ea-be7c82b93139-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.929084] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "fc54a008-eb2e-4b10-86ea-be7c82b93139-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.931178] env[63297]: INFO nova.compute.manager [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Terminating instance [ 1517.932887] env[63297]: DEBUG nova.compute.manager [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1517.933099] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1517.933945] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be93268-7b78-43ec-a92b-cb24f8ca66cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.941884] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1517.941884] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bbaa2045-d7f1-4d95-a5e8-258bfac3769f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.948142] env[63297]: DEBUG oslo_vmware.api [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1517.948142] env[63297]: value = "task-1697707" [ 1517.948142] env[63297]: _type = "Task" [ 1517.948142] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.956348] env[63297]: DEBUG oslo_vmware.api [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697707, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.998228] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "4701073f-eeee-4f37-919a-4c53663ac15f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.003246] env[63297]: DEBUG nova.compute.utils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1518.007486] env[63297]: DEBUG nova.compute.manager [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1518.007720] env[63297]: DEBUG nova.network.neutron [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1518.021046] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f2becf0-639f-4750-a9a1-61d224b643fd tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "4701073f-eeee-4f37-919a-4c53663ac15f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.619s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.021222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "4701073f-eeee-4f37-919a-4c53663ac15f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.023s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.021440] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "4701073f-eeee-4f37-919a-4c53663ac15f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.021645] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "4701073f-eeee-4f37-919a-4c53663ac15f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.021813] env[63297]: DEBUG oslo_concurrency.lockutils 
[None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "4701073f-eeee-4f37-919a-4c53663ac15f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.025115] env[63297]: INFO nova.compute.manager [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Terminating instance [ 1518.027692] env[63297]: DEBUG nova.compute.manager [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1518.028017] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1518.029656] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d05c23c-b7a2-423f-b144-1f02791559a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.038689] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1518.038789] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8e8f979-52bf-4d79-b041-e5b405c780b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.045609] env[63297]: DEBUG oslo_vmware.api [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1518.045609] env[63297]: value = "task-1697708" [ 1518.045609] env[63297]: _type = "Task" [ 1518.045609] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.053891] env[63297]: DEBUG oslo_vmware.api [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697708, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.068585] env[63297]: DEBUG nova.policy [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28ea980c339244f699047893336ee663', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '54456153a472421890c889a6f2c62b38', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1518.084031] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: c83c23d9-a8ec-4a87-8a8c-067e18d2615a] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1518.401807] env[63297]: DEBUG nova.network.neutron [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Successfully created port: 6875a0ac-55bd-4388-9c3d-5105d4cf1c7a {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1518.458016] env[63297]: DEBUG oslo_vmware.api [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697707, 'name': PowerOffVM_Task, 'duration_secs': 0.247819} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.458319] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1518.458481] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1518.458781] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c529191b-0419-47f5-884a-4f57ec7a346b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.512270] env[63297]: DEBUG nova.compute.manager [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1518.529234] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1518.529469] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1518.529648] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Deleting the datastore file [datastore1] fc54a008-eb2e-4b10-86ea-be7c82b93139 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1518.530489] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c70c154f-4b03-4d64-a490-c96598f1e2da {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.537227] env[63297]: DEBUG oslo_vmware.api [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1518.537227] env[63297]: value = "task-1697710" [ 1518.537227] env[63297]: _type = "Task" [ 1518.537227] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.545369] env[63297]: DEBUG oslo_vmware.api [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697710, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.561574] env[63297]: DEBUG oslo_vmware.api [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697708, 'name': PowerOffVM_Task, 'duration_secs': 0.203827} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.562626] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1518.562942] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1518.565743] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41e0af24-ad1c-4cec-bdf4-4937506fcfc0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.588265] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: c147f97d-7fae-4364-a9c0-04978df2450f] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1518.641392] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1518.641702] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1518.641913] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Deleting the datastore file [datastore1] 4701073f-eeee-4f37-919a-4c53663ac15f {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1518.647016] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b232013-5268-428c-acf1-008da550530a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.651102] env[63297]: DEBUG oslo_vmware.api [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for the task: (returnval){ [ 1518.651102] env[63297]: value = "task-1697712" [ 1518.651102] env[63297]: _type = "Task" [ 1518.651102] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.659687] env[63297]: DEBUG oslo_vmware.api [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697712, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.755061] env[63297]: INFO nova.compute.manager [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Unrescuing [ 1518.755304] env[63297]: DEBUG oslo_concurrency.lockutils [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "refresh_cache-5914b3ce-f40f-4782-b56a-9fc29c819938" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.755486] env[63297]: DEBUG oslo_concurrency.lockutils [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquired lock "refresh_cache-5914b3ce-f40f-4782-b56a-9fc29c819938" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.755609] env[63297]: DEBUG nova.network.neutron [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1518.897030] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da5dbb1-77c3-4029-a45b-cedbba1d04c7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.904726] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fed4f0e-9393-4947-888e-3b222a05261c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.935039] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c988287a-1eeb-4599-9620-adc9fd49275f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.942195] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582d8401-1ac4-413f-9ceb-8f27578b0989 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.955039] env[63297]: DEBUG nova.compute.provider_tree [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1519.047338] env[63297]: DEBUG oslo_vmware.api [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697710, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159184} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.047582] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1519.047806] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1519.047970] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1519.048156] env[63297]: INFO nova.compute.manager [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1519.048393] env[63297]: DEBUG oslo.service.loopingcall [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.048575] env[63297]: DEBUG nova.compute.manager [-] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1519.048667] env[63297]: DEBUG nova.network.neutron [-] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1519.091479] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: fb33135a-073d-4d80-9833-5b29afae1cc6] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1519.160759] env[63297]: DEBUG oslo_vmware.api [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Task: {'id': task-1697712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169337} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.160922] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1519.161120] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1519.161295] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1519.161464] env[63297]: INFO nova.compute.manager [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1519.161692] env[63297]: DEBUG oslo.service.loopingcall [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.161876] env[63297]: DEBUG nova.compute.manager [-] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1519.161967] env[63297]: DEBUG nova.network.neutron [-] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1519.391402] env[63297]: DEBUG nova.compute.manager [req-7db48da3-8175-4072-be2c-3642eb6158d4 req-96b05147-a527-4d4e-870a-d3634741e275 service nova] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Received event network-vif-deleted-5a7d0713-7282-4609-a3ac-28c150824f43 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1519.391624] env[63297]: INFO nova.compute.manager [req-7db48da3-8175-4072-be2c-3642eb6158d4 req-96b05147-a527-4d4e-870a-d3634741e275 service nova] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Neutron deleted interface 5a7d0713-7282-4609-a3ac-28c150824f43; detaching it from the instance and deleting it from the info cache [ 1519.391811] env[63297]: DEBUG nova.network.neutron [req-7db48da3-8175-4072-be2c-3642eb6158d4 req-96b05147-a527-4d4e-870a-d3634741e275 service nova] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.412272] env[63297]: DEBUG nova.compute.manager [req-1dd5842c-cbf8-4982-b970-386386235d8b req-d899756c-ec09-4f82-8438-463a573bee70 service nova] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Received event network-vif-deleted-18133a60-2528-4b03-8ed3-ce5b7a4b6cae {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1519.412502] env[63297]: INFO nova.compute.manager [req-1dd5842c-cbf8-4982-b970-386386235d8b req-d899756c-ec09-4f82-8438-463a573bee70 service nova] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Neutron deleted interface 18133a60-2528-4b03-8ed3-ce5b7a4b6cae; detaching it from the instance and deleting it from the info cache [ 1519.412679] env[63297]: DEBUG nova.network.neutron [req-1dd5842c-cbf8-4982-b970-386386235d8b req-d899756c-ec09-4f82-8438-463a573bee70 service nova] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.458342] env[63297]: DEBUG nova.scheduler.client.report [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1519.525099] env[63297]: DEBUG nova.compute.manager [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 
tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1519.540127] env[63297]: DEBUG nova.network.neutron [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Updating instance_info_cache with network_info: [{"id": "ca287ba8-2972-46d4-93dd-a3c4ce330623", "address": "fa:16:3e:db:10:68", "network": {"id": "5f1806b3-2bca-4ef0-8011-77ce4207d8e4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-78228628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "29c5c1c260474315a1a34b83a8054983", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca287ba8-29", "ovs_interfaceid": "ca287ba8-2972-46d4-93dd-a3c4ce330623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.550363] env[63297]: DEBUG nova.virt.hardware [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1519.550614] env[63297]: DEBUG nova.virt.hardware [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1519.550773] env[63297]: DEBUG nova.virt.hardware [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1519.550953] env[63297]: DEBUG 
nova.virt.hardware [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1519.551122] env[63297]: DEBUG nova.virt.hardware [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1519.551316] env[63297]: DEBUG nova.virt.hardware [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1519.551681] env[63297]: DEBUG nova.virt.hardware [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1519.551856] env[63297]: DEBUG nova.virt.hardware [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1519.552072] env[63297]: DEBUG nova.virt.hardware [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1519.552247] env[63297]: DEBUG nova.virt.hardware [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1519.552425] env[63297]: DEBUG nova.virt.hardware [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1519.553578] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3403178-0494-4a34-a1e1-0df4adfd8d72 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.562448] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06635032-479b-42d4-967f-5b61291b2e95 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.595018] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: dc196e68-b725-43a1-9848-e84d1b138245] Instance has had 0 of 5 cleanup attempts 
{{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1519.868619] env[63297]: DEBUG nova.network.neutron [-] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.873780] env[63297]: DEBUG nova.network.neutron [-] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.894103] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e93afc34-d0d0-4307-bf36-6253af19a605 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.904095] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790bc389-f5eb-4dc1-8cd6-efdad3dbd988 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.919894] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51da2e5f-aa03-421f-9887-f3ebb09c3d1b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.936608] env[63297]: DEBUG nova.compute.manager [req-7db48da3-8175-4072-be2c-3642eb6158d4 req-96b05147-a527-4d4e-870a-d3634741e275 service nova] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Detach interface failed, port_id=5a7d0713-7282-4609-a3ac-28c150824f43, reason: Instance 4701073f-eeee-4f37-919a-4c53663ac15f could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1519.940739] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152423f3-8499-4792-9ed0-db247f4f23e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.974424] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.975382] env[63297]: DEBUG nova.compute.manager [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1519.978743] env[63297]: DEBUG nova.compute.manager [req-1dd5842c-cbf8-4982-b970-386386235d8b req-d899756c-ec09-4f82-8438-463a573bee70 service nova] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Detach interface failed, port_id=18133a60-2528-4b03-8ed3-ce5b7a4b6cae, reason: Instance fc54a008-eb2e-4b10-86ea-be7c82b93139 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1519.978743] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.918s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.978743] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.980288] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.424s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.981724] env[63297]: INFO nova.compute.claims [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1520.017864] env[63297]: DEBUG nova.network.neutron [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Successfully updated port: 6875a0ac-55bd-4388-9c3d-5105d4cf1c7a {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1520.023332] env[63297]: INFO nova.scheduler.client.report [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleted allocations for instance 8fa5fef6-8768-4e24-aab3-db56a10588c2 [ 1520.044672] env[63297]: DEBUG oslo_concurrency.lockutils [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Releasing lock "refresh_cache-5914b3ce-f40f-4782-b56a-9fc29c819938" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.044672] env[63297]: DEBUG nova.objects.instance [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lazy-loading 'flavor' on Instance uuid 5914b3ce-f40f-4782-b56a-9fc29c819938 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1520.101116] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 63785911-ea55-4aeb-9ba2-6cea5ddd9cae] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1520.372727] env[63297]: INFO nova.compute.manager [-] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Took 1.21 seconds to deallocate network for instance. 
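Annotation (not part of the captured log): the entries above trace the guest-deletion path for instances 4701073f-eeee-4f37-919a-4c53663ac15f and fc54a008-eb2e-4b10-86ea-be7c82b93139 — power off the VM, unregister it, delete its datastore folder, then deallocate the Neutron ports — with each vCenter task polled until it reports completion ("progress is 0%" followed by "completed successfully"). The sketch below is a minimal, hypothetical illustration of that power-off / unregister / delete-file / poll pattern; the class and method names are invented stand-ins and are not the real oslo.vmware or nova.virt.vmwareapi APIs.

# Illustrative sketch only: mirrors the destroy sequence visible in the log
# (PowerOffVM_Task -> UnregisterVM -> DeleteDatastoreFile_Task, each polled to
# completion). All names here are hypothetical, not Nova's actual code.
import itertools
import time


class FakeTask:
    """Toy task handle that reports 'running' a couple of times, then 'success'."""
    _ids = itertools.count(1697700)

    def __init__(self, name):
        self.id = f"task-{next(self._ids)}"
        self.name = name
        self._polls_left = 2

    def poll(self):
        if self._polls_left > 0:
            self._polls_left -= 1
            return "running"
        return "success"


class FakeVimSession:
    """Stand-in for a vSphere session; every call just returns a FakeTask."""

    def power_off_vm(self, vm_ref):
        return FakeTask("PowerOffVM_Task")

    def unregister_vm(self, vm_ref):
        # UnregisterVM completes synchronously in the real SOAP API; it is
        # modeled as a task here only to keep the example uniform.
        return FakeTask("UnregisterVM")

    def delete_datastore_file(self, path):
        return FakeTask("DeleteDatastoreFile_Task")


def wait_for_task(task, interval=0.01):
    """Poll a task until it finishes, like the 'progress is 0%' /
    'completed successfully' pairs in the log above."""
    while True:
        state = task.poll()
        print(f"Task {{'id': '{task.id}', 'name': '{task.name}'}} state={state}")
        if state == "success":
            return
        time.sleep(interval)


def destroy_instance(session, vm_ref, datastore_path):
    """Power off, unregister, then delete the instance's datastore folder."""
    wait_for_task(session.power_off_vm(vm_ref))
    wait_for_task(session.unregister_vm(vm_ref))
    wait_for_task(session.delete_datastore_file(datastore_path))


if __name__ == "__main__":
    destroy_instance(FakeVimSession(), "vm-123",
                     "[datastore1] 4701073f-eeee-4f37-919a-4c53663ac15f")

After the hypervisor-side cleanup, the manager deallocates networking in the background (the oslo.service looping call waiting on _deallocate_network_with_retries), which is why the "Took N seconds to deallocate network for instance" INFO lines appear only after the datastore file deletion tasks have completed.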
[ 1520.377429] env[63297]: INFO nova.compute.manager [-] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Took 1.33 seconds to deallocate network for instance. [ 1520.487128] env[63297]: DEBUG nova.compute.utils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1520.490780] env[63297]: DEBUG nova.compute.manager [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1520.490970] env[63297]: DEBUG nova.network.neutron [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1520.523556] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "refresh_cache-42d872d6-da12-474b-8741-1d991d507cfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.523859] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "refresh_cache-42d872d6-da12-474b-8741-1d991d507cfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.523859] env[63297]: DEBUG nova.network.neutron [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1520.532220] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa55bbfb-a459-4428-a1a2-7c557f0c117e tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "8fa5fef6-8768-4e24-aab3-db56a10588c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.425s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1520.549142] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11be0db-5e92-4255-820b-533d4770c328 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.570406] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1520.572037] env[63297]: DEBUG nova.policy [None 
req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28ea980c339244f699047893336ee663', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '54456153a472421890c889a6f2c62b38', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1520.573400] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c5ef9dcc-d053-4cc0-a882-caa850743380 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.581072] env[63297]: DEBUG oslo_vmware.api [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1520.581072] env[63297]: value = "task-1697713" [ 1520.581072] env[63297]: _type = "Task" [ 1520.581072] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.589826] env[63297]: DEBUG oslo_vmware.api [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697713, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.605203] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 6ce88b93-aa42-4f34-81fa-6c09c23ace81] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1520.790291] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "14b4518e-044a-451a-845d-fa3742e5b3e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1520.790291] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "14b4518e-044a-451a-845d-fa3742e5b3e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1520.790291] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "14b4518e-044a-451a-845d-fa3742e5b3e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1520.790503] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 
tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "14b4518e-044a-451a-845d-fa3742e5b3e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1520.790640] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "14b4518e-044a-451a-845d-fa3742e5b3e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1520.793029] env[63297]: INFO nova.compute.manager [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Terminating instance [ 1520.795199] env[63297]: DEBUG nova.compute.manager [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1520.795199] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1520.795631] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de41ef1-5eac-44a2-9707-52176b812930 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.803427] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1520.803685] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9e079df-c48f-431d-b59c-09d23ba1fff5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.813299] env[63297]: DEBUG oslo_vmware.api [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1520.813299] env[63297]: value = "task-1697714" [ 1520.813299] env[63297]: _type = "Task" [ 1520.813299] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.821233] env[63297]: DEBUG oslo_vmware.api [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697714, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.880193] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1520.883359] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1520.884873] env[63297]: DEBUG nova.network.neutron [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Successfully created port: b7115a0d-4014-408b-b05e-52f08768ec9e {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1520.996897] env[63297]: DEBUG nova.compute.manager [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1521.078865] env[63297]: DEBUG nova.network.neutron [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1521.097553] env[63297]: DEBUG oslo_vmware.api [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697713, 'name': PowerOffVM_Task, 'duration_secs': 0.203563} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.098026] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1521.103374] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Reconfiguring VM instance instance-00000049 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1521.106637] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23222edb-7d14-4aba-abdb-291ca02638ba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.124751] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 192c3a5d-3a23-4f78-8dc7-a256b6d9381d] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1521.131144] env[63297]: DEBUG oslo_vmware.api [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1521.131144] env[63297]: value = "task-1697715" [ 1521.131144] env[63297]: _type = "Task" [ 1521.131144] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.144531] env[63297]: DEBUG oslo_vmware.api [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697715, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.249056] env[63297]: DEBUG nova.network.neutron [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Updating instance_info_cache with network_info: [{"id": "6875a0ac-55bd-4388-9c3d-5105d4cf1c7a", "address": "fa:16:3e:10:ae:3e", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6875a0ac-55", "ovs_interfaceid": "6875a0ac-55bd-4388-9c3d-5105d4cf1c7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.320579] env[63297]: DEBUG oslo_vmware.api [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697714, 'name': PowerOffVM_Task, 'duration_secs': 0.277031} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.322966] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1521.323159] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1521.323582] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d79e08c2-9fe2-4a8b-a550-a6a6815d76f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.394913] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfd69e1-6f85-46c0-bdfb-1e54bc953adf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.398893] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1521.399136] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1521.399315] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleting the datastore file [datastore1] 14b4518e-044a-451a-845d-fa3742e5b3e2 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1521.399919] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-887b0f40-59e7-43de-875d-7bfadf5c99bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.404657] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017a1bdd-9d4c-42bb-8361-dacbd19a9d52 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.408528] env[63297]: DEBUG oslo_vmware.api [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1521.408528] env[63297]: value = "task-1697717" [ 1521.408528] env[63297]: _type = "Task" [ 1521.408528] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.441280] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a739de5-73a7-4a5b-b594-4e18ac5465fa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.445332] env[63297]: DEBUG nova.compute.manager [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Received event network-vif-plugged-6875a0ac-55bd-4388-9c3d-5105d4cf1c7a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1521.445603] env[63297]: DEBUG oslo_concurrency.lockutils [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] Acquiring lock "42d872d6-da12-474b-8741-1d991d507cfa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.445844] env[63297]: DEBUG oslo_concurrency.lockutils [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] Lock "42d872d6-da12-474b-8741-1d991d507cfa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.446057] env[63297]: DEBUG oslo_concurrency.lockutils [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] Lock "42d872d6-da12-474b-8741-1d991d507cfa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.446244] env[63297]: DEBUG nova.compute.manager [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] No waiting events found dispatching network-vif-plugged-6875a0ac-55bd-4388-9c3d-5105d4cf1c7a {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1521.446453] env[63297]: WARNING nova.compute.manager [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Received unexpected event network-vif-plugged-6875a0ac-55bd-4388-9c3d-5105d4cf1c7a for instance with vm_state building and task_state spawning. [ 1521.446651] env[63297]: DEBUG nova.compute.manager [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Received event network-changed-6875a0ac-55bd-4388-9c3d-5105d4cf1c7a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1521.446881] env[63297]: DEBUG nova.compute.manager [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Refreshing instance network info cache due to event network-changed-6875a0ac-55bd-4388-9c3d-5105d4cf1c7a. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1521.447111] env[63297]: DEBUG oslo_concurrency.lockutils [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] Acquiring lock "refresh_cache-42d872d6-da12-474b-8741-1d991d507cfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.451167] env[63297]: DEBUG oslo_vmware.api [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697717, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.456438] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48959c3a-cf46-4a7b-b6a2-6fee3af48d1c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.471610] env[63297]: DEBUG nova.compute.provider_tree [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1521.627457] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: c4e96403-895c-479d-bfb2-274a87446bf9] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1521.641058] env[63297]: DEBUG oslo_vmware.api [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697715, 'name': ReconfigVM_Task, 'duration_secs': 0.206036} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.641365] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Reconfigured VM instance instance-00000049 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1521.641554] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1521.641805] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3a92ebd-a1e9-4b2f-892e-379c05a05e57 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.648238] env[63297]: DEBUG oslo_vmware.api [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1521.648238] env[63297]: value = "task-1697718" [ 1521.648238] env[63297]: _type = "Task" [ 1521.648238] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.656991] env[63297]: DEBUG oslo_vmware.api [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697718, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.752036] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "refresh_cache-42d872d6-da12-474b-8741-1d991d507cfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.752489] env[63297]: DEBUG nova.compute.manager [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Instance network_info: |[{"id": "6875a0ac-55bd-4388-9c3d-5105d4cf1c7a", "address": "fa:16:3e:10:ae:3e", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6875a0ac-55", "ovs_interfaceid": "6875a0ac-55bd-4388-9c3d-5105d4cf1c7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1521.752833] env[63297]: DEBUG oslo_concurrency.lockutils [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] Acquired lock "refresh_cache-42d872d6-da12-474b-8741-1d991d507cfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.753060] env[63297]: DEBUG nova.network.neutron [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Refreshing network info cache for port 6875a0ac-55bd-4388-9c3d-5105d4cf1c7a {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1521.754367] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:ae:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6875a0ac-55bd-4388-9c3d-5105d4cf1c7a', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1521.762159] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Creating folder: Project (54456153a472421890c889a6f2c62b38). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1521.765113] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b4e020c-8ae4-4c5b-a8b8-0b2fd2f1d00a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.776019] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Created folder: Project (54456153a472421890c889a6f2c62b38) in parent group-v353718. [ 1521.776216] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Creating folder: Instances. Parent ref: group-v353931. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1521.776449] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d50891c-4726-404c-acfe-1c458e3ae029 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.786423] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Created folder: Instances in parent group-v353931. [ 1521.786661] env[63297]: DEBUG oslo.service.loopingcall [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1521.786922] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1521.787136] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-265b75d4-25bf-477e-ad8d-e26a59aefa66 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.808604] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1521.808604] env[63297]: value = "task-1697721" [ 1521.808604] env[63297]: _type = "Task" [ 1521.808604] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.816461] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697721, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.919414] env[63297]: DEBUG oslo_vmware.api [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697717, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132156} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.919674] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1521.919858] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1521.920045] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1521.920221] env[63297]: INFO nova.compute.manager [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1521.920461] env[63297]: DEBUG oslo.service.loopingcall [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1521.920656] env[63297]: DEBUG nova.compute.manager [-] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1521.920744] env[63297]: DEBUG nova.network.neutron [-] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1521.974542] env[63297]: DEBUG nova.scheduler.client.report [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1522.006292] env[63297]: DEBUG nova.compute.manager [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1522.039030] env[63297]: DEBUG nova.virt.hardware [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1522.039030] env[63297]: DEBUG nova.virt.hardware [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1522.039213] env[63297]: DEBUG nova.virt.hardware [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1522.039319] env[63297]: DEBUG nova.virt.hardware [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 
tempest-ServerRescueNegativeTestJSON-169332532-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1522.039470] env[63297]: DEBUG nova.virt.hardware [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1522.039618] env[63297]: DEBUG nova.virt.hardware [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1522.039826] env[63297]: DEBUG nova.virt.hardware [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1522.039988] env[63297]: DEBUG nova.virt.hardware [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1522.040232] env[63297]: DEBUG nova.virt.hardware [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1522.040327] env[63297]: DEBUG nova.virt.hardware [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1522.040497] env[63297]: DEBUG nova.virt.hardware [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1522.041477] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43753a5-5890-4bf0-a3dd-3e9bdb4f4887 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.049467] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c337c6a-aae6-4d97-a7f2-d9a2be17bd20 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.079498] env[63297]: DEBUG nova.network.neutron [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Updated VIF entry in instance network info cache for port 6875a0ac-55bd-4388-9c3d-5105d4cf1c7a. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1522.080069] env[63297]: DEBUG nova.network.neutron [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Updating instance_info_cache with network_info: [{"id": "6875a0ac-55bd-4388-9c3d-5105d4cf1c7a", "address": "fa:16:3e:10:ae:3e", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6875a0ac-55", "ovs_interfaceid": "6875a0ac-55bd-4388-9c3d-5105d4cf1c7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1522.132156] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: a57c0638-e14b-4474-a6b4-7184d7e2a0fe] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1522.162380] env[63297]: DEBUG oslo_vmware.api [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697718, 'name': PowerOnVM_Task, 'duration_secs': 0.378867} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.162850] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1522.163271] env[63297]: DEBUG nova.compute.manager [None req-86e21fd6-d480-46e5-8a10-e7118aa1011b tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1522.165018] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5b68b6-9da8-4a07-b706-c179499ec6f0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.319680] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697721, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.461162] env[63297]: DEBUG nova.compute.manager [req-5508f251-54fa-4f76-b916-aee66f351425 req-4203dfc7-1902-4dfd-83f3-2328ea281f8b service nova] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Received event network-vif-deleted-1c87cf43-e466-4636-a53a-8c75e95f185d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1522.461162] env[63297]: INFO nova.compute.manager [req-5508f251-54fa-4f76-b916-aee66f351425 req-4203dfc7-1902-4dfd-83f3-2328ea281f8b service nova] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Neutron deleted interface 1c87cf43-e466-4636-a53a-8c75e95f185d; detaching it from the instance and deleting it from the info cache [ 1522.461162] env[63297]: DEBUG nova.network.neutron [req-5508f251-54fa-4f76-b916-aee66f351425 req-4203dfc7-1902-4dfd-83f3-2328ea281f8b service nova] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1522.482024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.482024] env[63297]: DEBUG nova.compute.manager [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1522.483056] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.714s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.484614] env[63297]: INFO nova.compute.claims [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1522.529641] env[63297]: DEBUG nova.network.neutron [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Successfully updated port: b7115a0d-4014-408b-b05e-52f08768ec9e {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1522.582462] env[63297]: DEBUG oslo_concurrency.lockutils [req-4c87b324-be4a-4f24-a33d-e4dc8a0f7502 req-09187853-dfb1-4bfc-b481-a9fca0614986 service nova] Releasing lock "refresh_cache-42d872d6-da12-474b-8741-1d991d507cfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.635679] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 41b1ce5d-a8ac-4b93-94a3-cf26367266d6] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1522.819369] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697721, 'name': CreateVM_Task, 'duration_secs': 0.730448} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.819559] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1522.820248] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.820408] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.820735] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1522.820993] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06a478df-ff90-4c37-a9af-fae372e3c940 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.825896] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1522.825896] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52faa6a2-9f29-3588-262b-3fb0a4cdd5e7" [ 1522.825896] env[63297]: _type = "Task" [ 1522.825896] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.833720] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52faa6a2-9f29-3588-262b-3fb0a4cdd5e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.936993] env[63297]: DEBUG nova.network.neutron [-] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1522.962940] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1a51e57-65da-42ba-81b0-23775f4151c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.972629] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbb4d59-0067-4f51-bcb2-bada920db1e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.984661] env[63297]: DEBUG nova.compute.utils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1522.986093] env[63297]: DEBUG nova.compute.manager [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1522.986291] env[63297]: DEBUG nova.network.neutron [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1523.727145] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "refresh_cache-b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1523.727561] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "refresh_cache-b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.727561] env[63297]: DEBUG nova.network.neutron [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1523.732020] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 272180b9-e79b-4714-b28b-470937509f42] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1523.732020] env[63297]: INFO nova.compute.manager [-] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Took 1.81 seconds to deallocate network for instance. 
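The records above (CreateVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task, SearchDatastore_Task) all follow the same shape: a vCenter method returns a task reference, and the driver polls it, logging "progress is N%" until "completed successfully" with a recorded duration_secs. The snippet below is a minimal schematic sketch of that poll loop in plain Python; it is not the oslo.vmware implementation, and the fake task table plus get_task_info() are purely illustrative stand-ins for the real API session lookup.

    import itertools
    import time

    # Illustrative fake task state, standing in for vCenter; not real driver data.
    _FAKE_TASKS = {"task-1697721": itertools.chain(
        [{"state": "running", "progress": 0},
         {"state": "running", "progress": 25}],
        itertools.repeat({"state": "success", "progress": 100}))}

    def get_task_info(task_ref):
        # Hypothetical lookup; the real driver queries vCenter through its API session.
        return next(_FAKE_TASKS[task_ref])

    def wait_for_task(task_ref, poll_interval=0.1):
        """Poll a vCenter-style task until it finishes, mirroring the log's
        'progress is N%' / 'completed successfully' sequence."""
        start = time.time()
        while True:
            info = get_task_info(task_ref)
            if info["state"] == "success":
                return {"id": task_ref, "duration_secs": round(time.time() - start, 6)}
            if info["state"] == "error":
                raise RuntimeError(f"task {task_ref} failed")
            print(f"Task {task_ref} progress is {info['progress']}%")
            time.sleep(poll_interval)

    print(wait_for_task("task-1697721"))

Under these assumptions the sketch prints the same progress-then-duration sequence that the _poll_task and wait_for_task DEBUG lines record for each task id.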
[ 1523.732020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "5914b3ce-f40f-4782-b56a-9fc29c819938" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.732020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "5914b3ce-f40f-4782-b56a-9fc29c819938" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.732020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "5914b3ce-f40f-4782-b56a-9fc29c819938-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.732020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "5914b3ce-f40f-4782-b56a-9fc29c819938-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.732020] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "5914b3ce-f40f-4782-b56a-9fc29c819938-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.732918] env[63297]: DEBUG nova.compute.manager [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1523.737528] env[63297]: DEBUG nova.compute.manager [req-5508f251-54fa-4f76-b916-aee66f351425 req-4203dfc7-1902-4dfd-83f3-2328ea281f8b service nova] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Detach interface failed, port_id=1c87cf43-e466-4636-a53a-8c75e95f185d, reason: Instance 14b4518e-044a-451a-845d-fa3742e5b3e2 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1523.739115] env[63297]: DEBUG nova.policy [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43ab498375eb47a3923ac10343c11d34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d5cb4b4799b4b8b99648e718dbc0254', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1523.741073] env[63297]: INFO nova.compute.manager [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Terminating instance [ 1523.745103] env[63297]: DEBUG nova.compute.manager [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Received event network-vif-plugged-b7115a0d-4014-408b-b05e-52f08768ec9e {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1523.745103] env[63297]: DEBUG oslo_concurrency.lockutils [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] Acquiring lock "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.745103] env[63297]: DEBUG oslo_concurrency.lockutils [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] Lock "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.745269] env[63297]: DEBUG oslo_concurrency.lockutils [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] Lock "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.745411] env[63297]: DEBUG nova.compute.manager [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] No waiting events found dispatching network-vif-plugged-b7115a0d-4014-408b-b05e-52f08768ec9e {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1523.745562] env[63297]: WARNING nova.compute.manager [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Received unexpected event network-vif-plugged-b7115a0d-4014-408b-b05e-52f08768ec9e for instance with vm_state building and task_state spawning. 
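The lock and WARNING records above illustrate how externally delivered Neutron events (network-vif-plugged, network-changed) are handled: the manager takes a per-instance "<uuid>-events" lock, pops any matching waiter, and when nothing is waiting it logs "Received unexpected event ..." instead of dispatching. The sketch below is a simplified, self-contained model of that pop-or-warn behaviour; the class and function names are illustrative and do not mirror Nova's actual code.

    import threading

    class InstanceEvents:
        """Toy model of the pop-or-warn event dispatch seen in the log."""

        def __init__(self):
            self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
            self._waiters = {}              # (instance_uuid, event_name) -> callback

        def prepare_for_event(self, instance_uuid, event_name, callback):
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = callback

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    def external_instance_event(events, instance_uuid, event_name):
        callback = events.pop_instance_event(instance_uuid, event_name)
        if callback is None:
            # Corresponds to the "Received unexpected event ... for instance with
            # vm_state building and task_state spawning" WARNING lines.
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
        else:
            callback(event_name)

    events = InstanceEvents()
    external_instance_event(events, "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a",
                            "network-vif-plugged-b7115a0d-4014-408b-b05e-52f08768ec9e")

With no waiter registered, the call takes the warning branch, which is the same outcome the compute manager logs here because the port was plugged while the instance was still building.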
[ 1523.746724] env[63297]: DEBUG nova.compute.manager [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Received event network-changed-b7115a0d-4014-408b-b05e-52f08768ec9e {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1523.746724] env[63297]: DEBUG nova.compute.manager [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Refreshing instance network info cache due to event network-changed-b7115a0d-4014-408b-b05e-52f08768ec9e. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1523.746724] env[63297]: DEBUG oslo_concurrency.lockutils [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] Acquiring lock "refresh_cache-b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1523.754019] env[63297]: DEBUG nova.compute.manager [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1523.754019] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1523.755484] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422b6acf-1b6a-4548-a69e-5df311b18c89 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.771611] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52faa6a2-9f29-3588-262b-3fb0a4cdd5e7, 'name': SearchDatastore_Task, 'duration_secs': 0.01198} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.772230] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1523.772729] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.772953] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1523.773420] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1523.773420] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.773557] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1523.773987] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb20cdaa-b4d3-40c7-9dcc-5501a131c8b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.775505] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82e0eda5-524f-4b6b-bc64-d467dde6629e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.782916] env[63297]: DEBUG oslo_vmware.api [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1523.782916] env[63297]: value = "task-1697722" [ 1523.782916] env[63297]: _type = "Task" [ 1523.782916] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.789619] env[63297]: DEBUG nova.network.neutron [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1523.791456] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1523.791636] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1523.794215] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c708477-0a50-4877-826b-c05eee4b438b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.800642] env[63297]: DEBUG oslo_vmware.api [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697722, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.804419] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1523.804419] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5214936a-16a9-cea4-ae7a-f500d4ad8b76" [ 1523.804419] env[63297]: _type = "Task" [ 1523.804419] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.816086] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5214936a-16a9-cea4-ae7a-f500d4ad8b76, 'name': SearchDatastore_Task, 'duration_secs': 0.009021} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.817395] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72d9698e-e6db-4cde-b1b7-47da4521f22f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.825982] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1523.825982] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52de06e9-a833-1bd1-aa9a-b811a8cb94b0" [ 1523.825982] env[63297]: _type = "Task" [ 1523.825982] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.833817] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52de06e9-a833-1bd1-aa9a-b811a8cb94b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.990641] env[63297]: DEBUG nova.network.neutron [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Updating instance_info_cache with network_info: [{"id": "b7115a0d-4014-408b-b05e-52f08768ec9e", "address": "fa:16:3e:70:79:be", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7115a0d-40", "ovs_interfaceid": "b7115a0d-4014-408b-b05e-52f08768ec9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.051769] env[63297]: DEBUG nova.network.neutron [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Successfully created port: 5da611f1-76bd-4f99-a624-d504e942a954 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1524.160120] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b5c8b8-bfd3-4c7e-a6f5-194edecb4a32 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.168258] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745fed15-8122-433d-883a-104892cd015c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.211763] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebb2e1c-9ccf-494e-b649-cb24e7db26a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.222938] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd924c11-7059-426d-8705-b69bc84602fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.249747] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 4e6b1296-9e19-4047-9c38-dc94c686d0cb] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1524.252593] env[63297]: DEBUG nova.compute.provider_tree [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1524.262836] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.293121] env[63297]: DEBUG oslo_vmware.api [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697722, 'name': PowerOffVM_Task, 'duration_secs': 0.162818} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.293381] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1524.293549] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1524.293791] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac85e782-5fd0-4768-b6a1-ec9b9ccc1280 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.336701] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52de06e9-a833-1bd1-aa9a-b811a8cb94b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009108} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.337017] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.337289] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 42d872d6-da12-474b-8741-1d991d507cfa/42d872d6-da12-474b-8741-1d991d507cfa.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1524.337592] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3af036e-6acd-43c6-90e4-7bcb6f9a20b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.345085] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1524.345085] env[63297]: value = "task-1697724" [ 1524.345085] env[63297]: _type = "Task" [ 1524.345085] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.353974] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697724, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.365148] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1524.365425] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1524.365610] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Deleting the datastore file [datastore1] 5914b3ce-f40f-4782-b56a-9fc29c819938 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1524.365861] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec407a4a-a301-457a-8823-4c87646a96b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.377324] env[63297]: DEBUG oslo_vmware.api [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1524.377324] env[63297]: value = "task-1697725" [ 1524.377324] env[63297]: _type = "Task" [ 1524.377324] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.385213] env[63297]: DEBUG oslo_vmware.api [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697725, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.493717] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "refresh_cache-b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.494040] env[63297]: DEBUG nova.compute.manager [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Instance network_info: |[{"id": "b7115a0d-4014-408b-b05e-52f08768ec9e", "address": "fa:16:3e:70:79:be", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7115a0d-40", "ovs_interfaceid": "b7115a0d-4014-408b-b05e-52f08768ec9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1524.494346] env[63297]: DEBUG oslo_concurrency.lockutils [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] Acquired lock "refresh_cache-b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.494496] env[63297]: DEBUG nova.network.neutron [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Refreshing network info cache for port b7115a0d-4014-408b-b05e-52f08768ec9e {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1524.495764] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:79:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7115a0d-4014-408b-b05e-52f08768ec9e', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1524.503932] env[63297]: DEBUG oslo.service.loopingcall [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 
tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1524.507073] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1524.507608] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1de29e9e-f30f-4790-bfb3-bcfa0d418fb4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.528593] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1524.528593] env[63297]: value = "task-1697726" [ 1524.528593] env[63297]: _type = "Task" [ 1524.528593] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.537010] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697726, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.754966] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 99cc8af3-5c18-4839-94db-996861e0c276] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1524.758452] env[63297]: DEBUG nova.compute.manager [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1524.761581] env[63297]: DEBUG nova.scheduler.client.report [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1524.779041] env[63297]: DEBUG nova.network.neutron [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Updated VIF entry in instance network info cache for port b7115a0d-4014-408b-b05e-52f08768ec9e. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1524.779572] env[63297]: DEBUG nova.network.neutron [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Updating instance_info_cache with network_info: [{"id": "b7115a0d-4014-408b-b05e-52f08768ec9e", "address": "fa:16:3e:70:79:be", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7115a0d-40", "ovs_interfaceid": "b7115a0d-4014-408b-b05e-52f08768ec9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.793117] env[63297]: DEBUG nova.virt.hardware [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1524.793391] env[63297]: DEBUG nova.virt.hardware [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1524.793547] env[63297]: DEBUG nova.virt.hardware [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1524.793775] env[63297]: DEBUG nova.virt.hardware [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1524.793964] env[63297]: DEBUG nova.virt.hardware [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1524.794136] env[63297]: DEBUG nova.virt.hardware [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1524.794351] env[63297]: DEBUG nova.virt.hardware [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1524.794510] env[63297]: DEBUG nova.virt.hardware [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1524.794677] env[63297]: DEBUG nova.virt.hardware [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1524.794881] env[63297]: DEBUG nova.virt.hardware [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1524.795068] env[63297]: DEBUG nova.virt.hardware [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1524.796886] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efcacf1-bad4-42eb-9285-78febeafbfe8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.806083] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1706e97e-39b0-4b26-9bb4-7cb0860f9e5e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.853960] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697724, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488152} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.854231] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 42d872d6-da12-474b-8741-1d991d507cfa/42d872d6-da12-474b-8741-1d991d507cfa.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1524.854417] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1524.854659] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b02ad62-b1cf-405f-b3fd-4502e011c0d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.861659] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1524.861659] env[63297]: value = "task-1697727" [ 1524.861659] env[63297]: _type = "Task" [ 1524.861659] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.871282] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697727, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.886292] env[63297]: DEBUG oslo_vmware.api [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697725, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227309} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.886505] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1524.886687] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1524.886882] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1524.887042] env[63297]: INFO nova.compute.manager [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1524.887802] env[63297]: DEBUG oslo.service.loopingcall [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1524.887999] env[63297]: DEBUG nova.compute.manager [-] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1524.888110] env[63297]: DEBUG nova.network.neutron [-] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1525.042493] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697726, 'name': CreateVM_Task, 'duration_secs': 0.366063} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.042684] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1525.043365] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.043531] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1525.044517] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1525.044517] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d62e8ea-3e73-4edb-a40c-28c64a106afa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.049038] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1525.049038] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]521344f8-6d1c-cf65-dca6-b43b99319ddf" [ 1525.049038] env[63297]: _type = "Task" [ 1525.049038] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.056740] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521344f8-6d1c-cf65-dca6-b43b99319ddf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.221023] env[63297]: DEBUG nova.compute.manager [req-e30fa6c5-2f13-42c3-a229-05d32e663fd4 req-6dc32403-f054-46f4-b598-1a8616f46a19 service nova] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Received event network-vif-deleted-ca287ba8-2972-46d4-93dd-a3c4ce330623 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1525.221216] env[63297]: INFO nova.compute.manager [req-e30fa6c5-2f13-42c3-a229-05d32e663fd4 req-6dc32403-f054-46f4-b598-1a8616f46a19 service nova] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Neutron deleted interface ca287ba8-2972-46d4-93dd-a3c4ce330623; detaching it from the instance and deleting it from the info cache [ 1525.221391] env[63297]: DEBUG nova.network.neutron [req-e30fa6c5-2f13-42c3-a229-05d32e663fd4 req-6dc32403-f054-46f4-b598-1a8616f46a19 service nova] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.261154] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 5e158880-81a6-4d35-b1df-6fd59ba4a8ff] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1525.267017] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.783s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.267112] env[63297]: DEBUG nova.compute.manager [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1525.269769] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.750s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.269962] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.273079] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.224s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.273079] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.273834] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.791s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.273917] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.275449] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.515s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.276910] env[63297]: INFO nova.compute.claims [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1525.283082] env[63297]: DEBUG oslo_concurrency.lockutils [req-bd9c60df-92d3-4cf7-b0ef-58b4b46d57ef req-9e1f7d36-c923-4729-aef9-ec19028e8617 service nova] Releasing lock "refresh_cache-b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" {{(pid=63297) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1525.298867] env[63297]: INFO nova.scheduler.client.report [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Deleted allocations for instance 5124f7fb-1293-4964-98c4-426ecfce7d10 [ 1525.300536] env[63297]: INFO nova.scheduler.client.report [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleted allocations for instance 71faf167-dfe3-4792-9841-b5ab4b333884 [ 1525.311396] env[63297]: INFO nova.scheduler.client.report [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Deleted allocations for instance b95b7656-70ac-4eaf-9934-4b4c50e78035 [ 1525.371389] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068247} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.371646] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1525.372428] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a083fd5e-7c84-4803-852e-75f94139b19e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.395054] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 42d872d6-da12-474b-8741-1d991d507cfa/42d872d6-da12-474b-8741-1d991d507cfa.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1525.395054] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99bbeaca-f0e1-45fc-a3d6-a2676d0bd3e7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.416574] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1525.416574] env[63297]: value = "task-1697728" [ 1525.416574] env[63297]: _type = "Task" [ 1525.416574] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.425805] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697728, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.561009] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521344f8-6d1c-cf65-dca6-b43b99319ddf, 'name': SearchDatastore_Task, 'duration_secs': 0.060124} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.561339] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1525.561764] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1525.562208] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.562396] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1525.562716] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1525.563081] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa0bc4df-c612-41f4-8479-37d3b17a9d49 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.580931] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1525.580980] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1525.581719] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e6ef0d0-b11b-4d51-a223-b6f60445ef7f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.588044] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1525.588044] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52aa0b42-efef-d07e-f314-4260525fc8b3" [ 1525.588044] env[63297]: _type = "Task" [ 1525.588044] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.589377] env[63297]: DEBUG nova.compute.manager [req-c0d4a180-9f86-4acb-8178-32ef041b578a req-a6b26805-f69a-4579-b0cb-a5b04e976a50 service nova] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Received event network-vif-plugged-5da611f1-76bd-4f99-a624-d504e942a954 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1525.589626] env[63297]: DEBUG oslo_concurrency.lockutils [req-c0d4a180-9f86-4acb-8178-32ef041b578a req-a6b26805-f69a-4579-b0cb-a5b04e976a50 service nova] Acquiring lock "44f4776e-d4a1-40ad-a03b-bb03582b95bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.589776] env[63297]: DEBUG oslo_concurrency.lockutils [req-c0d4a180-9f86-4acb-8178-32ef041b578a req-a6b26805-f69a-4579-b0cb-a5b04e976a50 service nova] Lock "44f4776e-d4a1-40ad-a03b-bb03582b95bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.589943] env[63297]: DEBUG oslo_concurrency.lockutils [req-c0d4a180-9f86-4acb-8178-32ef041b578a req-a6b26805-f69a-4579-b0cb-a5b04e976a50 service nova] Lock "44f4776e-d4a1-40ad-a03b-bb03582b95bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.590125] env[63297]: DEBUG nova.compute.manager [req-c0d4a180-9f86-4acb-8178-32ef041b578a req-a6b26805-f69a-4579-b0cb-a5b04e976a50 service nova] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] No waiting events found dispatching network-vif-plugged-5da611f1-76bd-4f99-a624-d504e942a954 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1525.590284] env[63297]: WARNING nova.compute.manager [req-c0d4a180-9f86-4acb-8178-32ef041b578a req-a6b26805-f69a-4579-b0cb-a5b04e976a50 service nova] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Received unexpected event network-vif-plugged-5da611f1-76bd-4f99-a624-d504e942a954 for instance with vm_state building and task_state spawning. 
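The recurring "Invoking <Object>.<Something>_Task", "Waiting for the task: (returnval){ ... }" and "_poll_task ... progress is N%" lines in this stretch are the oslo.vmware task-polling loop at work. A minimal sketch of that call pattern is below; the vCenter host, credentials and the managed-object reference are placeholders for illustration and are not taken from this log.

```python
# Sketch (assumptions: placeholder vCenter host/credentials/moref) of the
# oslo.vmware pattern behind the "Invoking ..._Task" / "Waiting for the task"
# / "_poll_task ... progress is N%" DEBUG lines recorded above.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',          # placeholder vCenter + creds
    api_retry_count=10, task_poll_interval=0.5)  # poll cadence shown in the log

# Build a managed object reference for a VM (placeholder moref id), then
# start a vCenter task against it.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# wait_for_task() blocks, polling the task every task_poll_interval seconds
# (each poll is one "_poll_task ... progress is N%" line) until the task
# reports success or raises if it errors out.
session.wait_for_task(task_ref)
```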
[ 1525.598741] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52aa0b42-efef-d07e-f314-4260525fc8b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.647205] env[63297]: DEBUG nova.network.neutron [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Successfully updated port: 5da611f1-76bd-4f99-a624-d504e942a954 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1525.695261] env[63297]: DEBUG nova.network.neutron [-] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.724054] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-225dc2b4-677a-4d9c-bd9d-4d035da62aed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.735999] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301edccd-8b55-4719-bc51-ccfc753b39b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.772497] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: f429dd9b-be6c-4e90-876b-3a3931fb1c4a] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1525.776065] env[63297]: DEBUG nova.compute.utils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1525.777350] env[63297]: DEBUG nova.compute.manager [req-e30fa6c5-2f13-42c3-a229-05d32e663fd4 req-6dc32403-f054-46f4-b598-1a8616f46a19 service nova] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Detach interface failed, port_id=ca287ba8-2972-46d4-93dd-a3c4ce330623, reason: Instance 5914b3ce-f40f-4782-b56a-9fc29c819938 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1525.778055] env[63297]: DEBUG nova.compute.manager [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1525.778267] env[63297]: DEBUG nova.network.neutron [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1525.811942] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4fff9dcf-65cd-4e0e-a449-79c435b948cf tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "5124f7fb-1293-4964-98c4-426ecfce7d10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.625s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.812557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8c7d3c47-55a0-4ef1-a304-f6387197604d tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "71faf167-dfe3-4792-9841-b5ab4b333884" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.951s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.819245] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aee56113-9850-4fb3-a328-46d1cc45655d tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "b95b7656-70ac-4eaf-9934-4b4c50e78035" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.870s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.844802] env[63297]: DEBUG nova.policy [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6462fe5fe5b4032b9ab7bde9875e81a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '865def46f5404d40b485bb482ed9a05b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1525.927438] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697728, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.099520] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52aa0b42-efef-d07e-f314-4260525fc8b3, 'name': SearchDatastore_Task, 'duration_secs': 0.071223} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.100375] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3763e257-bf40-43db-b4ff-74045be1d718 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.105441] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1526.105441] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527ff269-aacc-f271-714e-f39529c476af" [ 1526.105441] env[63297]: _type = "Task" [ 1526.105441] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.114473] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527ff269-aacc-f271-714e-f39529c476af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.146179] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "refresh_cache-44f4776e-d4a1-40ad-a03b-bb03582b95bd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.146319] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "refresh_cache-44f4776e-d4a1-40ad-a03b-bb03582b95bd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.146972] env[63297]: DEBUG nova.network.neutron [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1526.200824] env[63297]: INFO nova.compute.manager [-] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Took 1.31 seconds to deallocate network for instance. 
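The "Acquiring lock ... by ...", "acquired ... waited Ns" and "released ... held Ns" lines around the resource tracker in this stretch come from oslo.concurrency's lock helpers wrapping the critical sections. A minimal sketch of that pattern follows; the lock name mirrors the log, but the decorated function and its body are illustrative only.

```python
# Sketch (illustrative function, not Nova's actual resource tracker) of the
# oslo.concurrency locking pattern that emits the "Acquiring lock ..." /
# "acquired ... waited Ns" / "released ... held Ns" DEBUG lines above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Only one caller in this process runs here at a time; the decorator's
    # wrapper logs how long the caller waited for and then held the lock.
    pass

# The same lock can also be taken explicitly as a context manager.
with lockutils.lock('compute_resources'):
    pass

update_usage()
```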
[ 1526.221137] env[63297]: DEBUG nova.network.neutron [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Successfully created port: 4edba2de-a7ec-4a5d-889c-b76110d2b060 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1526.279228] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: d15a7e98-755b-4c5c-ba34-dc5fc3f8846d] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1526.281363] env[63297]: DEBUG nova.compute.manager [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1526.428866] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697728, 'name': ReconfigVM_Task, 'duration_secs': 0.825485} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.429368] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 42d872d6-da12-474b-8741-1d991d507cfa/42d872d6-da12-474b-8741-1d991d507cfa.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1526.430272] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78a59e96-effc-4bd4-9139-ef9db8280f6c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.440112] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1526.440112] env[63297]: value = "task-1697729" [ 1526.440112] env[63297]: _type = "Task" [ 1526.440112] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.447831] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697729, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.616145] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527ff269-aacc-f271-714e-f39529c476af, 'name': SearchDatastore_Task, 'duration_secs': 0.009321} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.616538] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.616876] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a/b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1526.617315] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-340c309f-4007-431a-9d1d-2c64d091cbfc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.623944] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1526.623944] env[63297]: value = "task-1697730" [ 1526.623944] env[63297]: _type = "Task" [ 1526.623944] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.633825] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697730, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.639861] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "9b1306f9-4b0a-4116-8e79-271478f33490" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.639861] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "9b1306f9-4b0a-4116-8e79-271478f33490" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.640321] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "9b1306f9-4b0a-4116-8e79-271478f33490-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.640599] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "9b1306f9-4b0a-4116-8e79-271478f33490-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.640823] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "9b1306f9-4b0a-4116-8e79-271478f33490-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.642943] env[63297]: INFO nova.compute.manager [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Terminating instance [ 1526.645046] env[63297]: DEBUG nova.compute.manager [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1526.645314] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1526.646247] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b2dc36-1666-496d-b17e-1097ddcec711 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.656734] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1526.658068] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dba55f08-65bc-4bc0-b24b-e98ba2285ad5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.660480] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedd0afe-e13b-42a9-a2d0-cb5d00eb5cee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.668861] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a8e0ff-b8e1-48bb-ac38-818831599104 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.672613] env[63297]: DEBUG oslo_vmware.api [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1526.672613] env[63297]: value = "task-1697731" [ 1526.672613] env[63297]: _type = "Task" [ 1526.672613] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.703094] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0202ac2-39d1-4700-bfe7-9dc944681ef6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.710278] env[63297]: DEBUG nova.network.neutron [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1526.712411] env[63297]: DEBUG oslo_vmware.api [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697731, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.713952] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.718423] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d81f15-05a8-42d4-9fba-6426fc38ba5f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.734728] env[63297]: DEBUG nova.compute.provider_tree [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1526.785936] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 2d7b237e-f86d-42b1-ab04-320f0012a2d1] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1526.932426] env[63297]: DEBUG nova.network.neutron [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Updating instance_info_cache with network_info: [{"id": "5da611f1-76bd-4f99-a624-d504e942a954", "address": "fa:16:3e:74:4c:fc", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5da611f1-76", "ovs_interfaceid": "5da611f1-76bd-4f99-a624-d504e942a954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.951788] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697729, 'name': Rename_Task, 'duration_secs': 0.194491} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.951923] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1526.952600] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c1121f4-bf6e-41cf-bc58-b1d2a35429be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.960152] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1526.960152] env[63297]: value = "task-1697732" [ 1526.960152] env[63297]: _type = "Task" [ 1526.960152] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.971428] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697732, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.133880] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697730, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.183671] env[63297]: DEBUG oslo_vmware.api [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697731, 'name': PowerOffVM_Task, 'duration_secs': 0.304414} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.183671] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1527.183671] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1527.183671] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36c409d0-054a-4f0c-9634-36be0225f98c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.243030] env[63297]: DEBUG nova.scheduler.client.report [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1527.293835] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 8bc4bb67-bc00-44c6-9c83-c0a1072142e6] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1527.298921] env[63297]: DEBUG nova.compute.manager [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1527.311539] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1527.311539] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1527.311539] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Deleting the datastore file [datastore1] 9b1306f9-4b0a-4116-8e79-271478f33490 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1527.311539] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb6a5ea3-8146-4ccc-b781-692eb3bf526f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.319416] env[63297]: DEBUG oslo_vmware.api [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for the task: (returnval){ [ 1527.319416] env[63297]: value = "task-1697734" [ 1527.319416] env[63297]: _type = "Task" [ 1527.319416] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.329713] env[63297]: DEBUG oslo_vmware.api [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697734, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.331921] env[63297]: DEBUG nova.virt.hardware [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1527.332166] env[63297]: DEBUG nova.virt.hardware [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1527.332338] env[63297]: DEBUG nova.virt.hardware [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1527.332517] env[63297]: DEBUG nova.virt.hardware [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1527.332682] env[63297]: DEBUG nova.virt.hardware [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1527.332829] env[63297]: DEBUG nova.virt.hardware [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1527.333054] env[63297]: DEBUG nova.virt.hardware [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1527.333224] env[63297]: DEBUG nova.virt.hardware [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1527.333390] env[63297]: DEBUG nova.virt.hardware [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1527.333564] env[63297]: DEBUG nova.virt.hardware [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1527.333748] env[63297]: DEBUG nova.virt.hardware [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1527.334782] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635eb4f5-08d1-4364-9ce4-758967556ce5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.342071] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07840fe3-5509-4811-8027-860056755d74 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.436032] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "refresh_cache-44f4776e-d4a1-40ad-a03b-bb03582b95bd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1527.436152] env[63297]: DEBUG nova.compute.manager [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Instance network_info: |[{"id": "5da611f1-76bd-4f99-a624-d504e942a954", "address": "fa:16:3e:74:4c:fc", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5da611f1-76", "ovs_interfaceid": "5da611f1-76bd-4f99-a624-d504e942a954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1527.436607] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:4c:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5da611f1-76bd-4f99-a624-d504e942a954', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1527.448812] env[63297]: DEBUG oslo.service.loopingcall [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.449104] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1527.449422] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9de993f2-1cbd-405c-bd58-08320f4f9032 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.473673] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697732, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.475015] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1527.475015] env[63297]: value = "task-1697735" [ 1527.475015] env[63297]: _type = "Task" [ 1527.475015] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.485432] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697735, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.613955] env[63297]: DEBUG nova.compute.manager [req-9ec6bfb2-a663-4cc8-900e-7d0e4c5f6a08 req-c8080b81-8209-4fd0-ab23-6d03cf0f31da service nova] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Received event network-changed-5da611f1-76bd-4f99-a624-d504e942a954 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1527.614170] env[63297]: DEBUG nova.compute.manager [req-9ec6bfb2-a663-4cc8-900e-7d0e4c5f6a08 req-c8080b81-8209-4fd0-ab23-6d03cf0f31da service nova] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Refreshing instance network info cache due to event network-changed-5da611f1-76bd-4f99-a624-d504e942a954. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1527.614367] env[63297]: DEBUG oslo_concurrency.lockutils [req-9ec6bfb2-a663-4cc8-900e-7d0e4c5f6a08 req-c8080b81-8209-4fd0-ab23-6d03cf0f31da service nova] Acquiring lock "refresh_cache-44f4776e-d4a1-40ad-a03b-bb03582b95bd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1527.614513] env[63297]: DEBUG oslo_concurrency.lockutils [req-9ec6bfb2-a663-4cc8-900e-7d0e4c5f6a08 req-c8080b81-8209-4fd0-ab23-6d03cf0f31da service nova] Acquired lock "refresh_cache-44f4776e-d4a1-40ad-a03b-bb03582b95bd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1527.614674] env[63297]: DEBUG nova.network.neutron [req-9ec6bfb2-a663-4cc8-900e-7d0e4c5f6a08 req-c8080b81-8209-4fd0-ab23-6d03cf0f31da service nova] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Refreshing network info cache for port 5da611f1-76bd-4f99-a624-d504e942a954 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1527.634584] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697730, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523314} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.634852] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a/b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1527.635084] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1527.635334] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd6a1fa1-6e4e-4003-ac08-b50952d2ed93 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.642324] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1527.642324] env[63297]: value = "task-1697736" [ 1527.642324] env[63297]: _type = "Task" [ 1527.642324] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.651690] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697736, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.747571] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.748290] env[63297]: DEBUG nova.compute.manager [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1527.751463] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.369s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.753257] env[63297]: INFO nova.compute.claims [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1527.802868] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 581f9d48-dcb8-4a34-928b-64087a9f966b] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1527.829175] env[63297]: DEBUG oslo_vmware.api [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Task: {'id': task-1697734, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195621} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.829465] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1527.829651] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1527.829825] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1527.830007] env[63297]: INFO nova.compute.manager [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1527.830259] env[63297]: DEBUG oslo.service.loopingcall [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.830694] env[63297]: DEBUG nova.compute.manager [-] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1527.830694] env[63297]: DEBUG nova.network.neutron [-] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1527.911538] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.911775] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.978552] env[63297]: DEBUG oslo_vmware.api [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697732, 'name': PowerOnVM_Task, 'duration_secs': 0.545609} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.985170] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1527.985170] env[63297]: INFO nova.compute.manager [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1527.985170] env[63297]: DEBUG nova.compute.manager [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1527.985578] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c53575-19dd-4cac-bc1b-a8be75156da6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.998330] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697735, 'name': CreateVM_Task, 'duration_secs': 0.421958} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.003141] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1528.008680] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.010239] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.010239] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1528.010239] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e085a65-9b4d-4adf-9a3e-36899b6d1aa2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.018018] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 
1528.018018] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52863672-d5aa-68a4-9927-377fdf05e677" [ 1528.018018] env[63297]: _type = "Task" [ 1528.018018] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.031059] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52863672-d5aa-68a4-9927-377fdf05e677, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.130978] env[63297]: DEBUG nova.compute.manager [req-90159f86-266c-4154-9999-4a404beef51f req-75b49cb0-3c48-4232-b63e-a702a82e9f60 service nova] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Received event network-vif-deleted-3db68b1a-d911-4324-b993-dc755277e56b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1528.131217] env[63297]: INFO nova.compute.manager [req-90159f86-266c-4154-9999-4a404beef51f req-75b49cb0-3c48-4232-b63e-a702a82e9f60 service nova] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Neutron deleted interface 3db68b1a-d911-4324-b993-dc755277e56b; detaching it from the instance and deleting it from the info cache [ 1528.131389] env[63297]: DEBUG nova.network.neutron [req-90159f86-266c-4154-9999-4a404beef51f req-75b49cb0-3c48-4232-b63e-a702a82e9f60 service nova] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.153143] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697736, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065727} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.156036] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1528.157251] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f9c985-3b92-49a5-9f53-3999d6f460ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.182842] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a/b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1528.183831] env[63297]: DEBUG nova.network.neutron [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Successfully updated port: 4edba2de-a7ec-4a5d-889c-b76110d2b060 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1528.185309] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-822986bb-28f5-434e-a7d1-a8744a48269f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.209182] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1528.209182] env[63297]: value = "task-1697737" [ 1528.209182] env[63297]: _type = "Task" [ 1528.209182] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.217094] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697737, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.259834] env[63297]: DEBUG nova.compute.utils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1528.263076] env[63297]: DEBUG nova.compute.manager [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1528.263250] env[63297]: DEBUG nova.network.neutron [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1528.305095] env[63297]: DEBUG nova.policy [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20a91144677b4efba8ab91acd53d1c04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c33733e0599840618625ecb3e6bb6029', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1528.306563] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: b65e8c04-df55-491e-861c-8aa6def8c9be] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1528.333533] env[63297]: DEBUG nova.network.neutron [req-9ec6bfb2-a663-4cc8-900e-7d0e4c5f6a08 req-c8080b81-8209-4fd0-ab23-6d03cf0f31da service nova] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Updated VIF entry in instance network info cache for port 5da611f1-76bd-4f99-a624-d504e942a954. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1528.334045] env[63297]: DEBUG nova.network.neutron [req-9ec6bfb2-a663-4cc8-900e-7d0e4c5f6a08 req-c8080b81-8209-4fd0-ab23-6d03cf0f31da service nova] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Updating instance_info_cache with network_info: [{"id": "5da611f1-76bd-4f99-a624-d504e942a954", "address": "fa:16:3e:74:4c:fc", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5da611f1-76", "ovs_interfaceid": "5da611f1-76bd-4f99-a624-d504e942a954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.414706] env[63297]: DEBUG nova.compute.manager [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 
0b7b9f1b-d277-4219-92fb-e35a8b867e77] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1528.521402] env[63297]: INFO nova.compute.manager [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Took 56.23 seconds to build instance. [ 1528.529597] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52863672-d5aa-68a4-9927-377fdf05e677, 'name': SearchDatastore_Task, 'duration_secs': 0.010758} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.529885] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.530196] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1528.530486] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.530642] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.530825] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1528.531144] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a44c9eed-3dc5-43cc-9c8d-e0caa41d9518 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.539773] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1528.539945] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f 
tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1528.540805] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6fdd555-1604-498e-b16c-763f9e31a927 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.546495] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1528.546495] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5200b5e5-887e-c809-7655-e05020187126" [ 1528.546495] env[63297]: _type = "Task" [ 1528.546495] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.556213] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5200b5e5-887e-c809-7655-e05020187126, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.610180] env[63297]: DEBUG nova.network.neutron [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Successfully created port: c5d88999-9081-4745-8997-a2c43582e237 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1528.612115] env[63297]: DEBUG nova.network.neutron [-] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.634456] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46076ba8-d0f4-4e44-8ea5-c8ed247ac2a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.647982] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a92e18b-5e39-4034-b5cf-614ab5e7af7e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.683000] env[63297]: DEBUG nova.compute.manager [req-90159f86-266c-4154-9999-4a404beef51f req-75b49cb0-3c48-4232-b63e-a702a82e9f60 service nova] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Detach interface failed, port_id=3db68b1a-d911-4324-b993-dc755277e56b, reason: Instance 9b1306f9-4b0a-4116-8e79-271478f33490 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1528.702444] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Acquiring lock "refresh_cache-5a868570-7504-4262-80b2-a458c219e689" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.702746] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Acquired lock "refresh_cache-5a868570-7504-4262-80b2-a458c219e689" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.702746] env[63297]: DEBUG nova.network.neutron [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1528.722788] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697737, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.763900] env[63297]: DEBUG nova.compute.manager [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1528.811938] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 862302e5-ad7e-40f3-a4a3-8c4a8035e1cf] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1528.836864] env[63297]: DEBUG oslo_concurrency.lockutils [req-9ec6bfb2-a663-4cc8-900e-7d0e4c5f6a08 req-c8080b81-8209-4fd0-ab23-6d03cf0f31da service nova] Releasing lock "refresh_cache-44f4776e-d4a1-40ad-a03b-bb03582b95bd" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.934297] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.024227] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d63513b1-16dd-4f24-846d-407d3098fed5 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "42d872d6-da12-474b-8741-1d991d507cfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.742s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.059480] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5200b5e5-887e-c809-7655-e05020187126, 'name': SearchDatastore_Task, 'duration_secs': 0.01172} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.060287] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd735644-2a0e-4436-af76-a775c718565d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.067343] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1529.067343] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524464d2-2148-d2db-4694-d3fc3b12459d" [ 1529.067343] env[63297]: _type = "Task" [ 1529.067343] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.074605] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524464d2-2148-d2db-4694-d3fc3b12459d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.115109] env[63297]: INFO nova.compute.manager [-] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Took 1.28 seconds to deallocate network for instance. 
[ 1529.126117] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa872ffd-0592-46ef-9542-c3fda2d3fe89 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.133867] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2117fd65-733a-476f-b1f0-38377f080a1d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.164621] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e3a857-a056-43e7-8f9a-3bd51fbf83cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.172463] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fab90a2-64e0-4f1a-8312-6483b1193a93 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.188594] env[63297]: DEBUG nova.compute.provider_tree [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1529.218378] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697737, 'name': ReconfigVM_Task, 'duration_secs': 0.817089} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.218657] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Reconfigured VM instance instance-0000004d to attach disk [datastore1] b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a/b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1529.219346] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-893c669e-9e13-4f64-9203-33ccf734d819 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.227028] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1529.227028] env[63297]: value = "task-1697738" [ 1529.227028] env[63297]: _type = "Task" [ 1529.227028] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.234162] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697738, 'name': Rename_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.234864] env[63297]: DEBUG nova.network.neutron [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1529.322169] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 22a927ad-c2af-4814-b728-ec31b76a34d4] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1529.377163] env[63297]: DEBUG nova.network.neutron [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Updating instance_info_cache with network_info: [{"id": "4edba2de-a7ec-4a5d-889c-b76110d2b060", "address": "fa:16:3e:11:23:0d", "network": {"id": "da840baf-2df6-4f03-809f-f567f712cf8a", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1784417458-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "865def46f5404d40b485bb482ed9a05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4edba2de-a7", "ovs_interfaceid": "4edba2de-a7ec-4a5d-889c-b76110d2b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.577581] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524464d2-2148-d2db-4694-d3fc3b12459d, 'name': SearchDatastore_Task, 'duration_secs': 0.009366} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.577826] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.578090] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 44f4776e-d4a1-40ad-a03b-bb03582b95bd/44f4776e-d4a1-40ad-a03b-bb03582b95bd.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1529.578353] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b453887a-268b-493f-b8eb-1a6332884370 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.584547] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1529.584547] env[63297]: value = "task-1697739" [ 1529.584547] env[63297]: _type = "Task" [ 1529.584547] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.593457] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697739, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.623686] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.638720] env[63297]: DEBUG nova.compute.manager [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] [instance: 5a868570-7504-4262-80b2-a458c219e689] Received event network-vif-plugged-4edba2de-a7ec-4a5d-889c-b76110d2b060 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1529.638894] env[63297]: DEBUG oslo_concurrency.lockutils [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] Acquiring lock "5a868570-7504-4262-80b2-a458c219e689-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.639115] env[63297]: DEBUG oslo_concurrency.lockutils [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] Lock "5a868570-7504-4262-80b2-a458c219e689-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.639284] env[63297]: DEBUG oslo_concurrency.lockutils [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] Lock "5a868570-7504-4262-80b2-a458c219e689-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.639450] env[63297]: DEBUG nova.compute.manager [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] [instance: 5a868570-7504-4262-80b2-a458c219e689] No waiting events found dispatching network-vif-plugged-4edba2de-a7ec-4a5d-889c-b76110d2b060 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1529.639611] env[63297]: WARNING nova.compute.manager [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] [instance: 5a868570-7504-4262-80b2-a458c219e689] Received unexpected event network-vif-plugged-4edba2de-a7ec-4a5d-889c-b76110d2b060 for instance with vm_state building and task_state spawning. [ 1529.639766] env[63297]: DEBUG nova.compute.manager [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] [instance: 5a868570-7504-4262-80b2-a458c219e689] Received event network-changed-4edba2de-a7ec-4a5d-889c-b76110d2b060 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1529.639917] env[63297]: DEBUG nova.compute.manager [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] [instance: 5a868570-7504-4262-80b2-a458c219e689] Refreshing instance network info cache due to event network-changed-4edba2de-a7ec-4a5d-889c-b76110d2b060. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1529.640239] env[63297]: DEBUG oslo_concurrency.lockutils [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] Acquiring lock "refresh_cache-5a868570-7504-4262-80b2-a458c219e689" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.691397] env[63297]: DEBUG nova.scheduler.client.report [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1529.736239] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697738, 'name': Rename_Task, 'duration_secs': 0.179865} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.736520] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1529.736765] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7792e60-3563-4619-bc2f-ae61b21670cd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.743872] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1529.743872] env[63297]: value = "task-1697740" [ 1529.743872] env[63297]: _type = "Task" [ 1529.743872] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.753296] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697740, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.777658] env[63297]: DEBUG nova.compute.manager [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1529.807061] env[63297]: DEBUG nova.virt.hardware [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1529.807061] env[63297]: DEBUG nova.virt.hardware [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1529.807061] env[63297]: DEBUG nova.virt.hardware [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1529.807061] env[63297]: DEBUG nova.virt.hardware [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1529.807061] env[63297]: DEBUG nova.virt.hardware [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1529.807061] env[63297]: DEBUG nova.virt.hardware [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1529.807061] env[63297]: DEBUG nova.virt.hardware [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1529.807061] env[63297]: DEBUG nova.virt.hardware [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1529.807061] env[63297]: DEBUG nova.virt.hardware [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 
tempest-ServersTestJSON-1972465365-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1529.807500] env[63297]: DEBUG nova.virt.hardware [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1529.807500] env[63297]: DEBUG nova.virt.hardware [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1529.808330] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9756d4-05db-4ec8-b312-8fe7bd57f588 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.816723] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea534c79-fec3-47f9-8aeb-fbe730ec7f37 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.832545] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 87fa97a7-a8a5-4184-b52a-b02ad5468127] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1529.880066] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Releasing lock "refresh_cache-5a868570-7504-4262-80b2-a458c219e689" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.880450] env[63297]: DEBUG nova.compute.manager [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Instance network_info: |[{"id": "4edba2de-a7ec-4a5d-889c-b76110d2b060", "address": "fa:16:3e:11:23:0d", "network": {"id": "da840baf-2df6-4f03-809f-f567f712cf8a", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1784417458-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "865def46f5404d40b485bb482ed9a05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4edba2de-a7", "ovs_interfaceid": "4edba2de-a7ec-4a5d-889c-b76110d2b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1529.880776] env[63297]: DEBUG oslo_concurrency.lockutils [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] Acquired lock "refresh_cache-5a868570-7504-4262-80b2-a458c219e689" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.880980] env[63297]: DEBUG nova.network.neutron [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] [instance: 5a868570-7504-4262-80b2-a458c219e689] Refreshing network info cache for port 4edba2de-a7ec-4a5d-889c-b76110d2b060 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1529.882326] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:23:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ff3ecd2f-0b10-4faf-a512-fd7a20c28df1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4edba2de-a7ec-4a5d-889c-b76110d2b060', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1529.890343] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Creating folder: Project (865def46f5404d40b485bb482ed9a05b). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1529.892516] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-207586fd-bea5-4825-832f-4f89deefa99a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.904738] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Created folder: Project (865def46f5404d40b485bb482ed9a05b) in parent group-v353718. [ 1529.904941] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Creating folder: Instances. Parent ref: group-v353936. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1529.905235] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81213b86-326e-4a4b-a941-065b3e967107 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.914763] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Created folder: Instances in parent group-v353936. 
[ 1529.915016] env[63297]: DEBUG oslo.service.loopingcall [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1529.915226] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a868570-7504-4262-80b2-a458c219e689] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1529.915447] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b751b57e-958e-4b0e-b644-0528f7d23032 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.935836] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1529.935836] env[63297]: value = "task-1697743" [ 1529.935836] env[63297]: _type = "Task" [ 1529.935836] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.944104] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697743, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.096410] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697739, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.197600] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.197600] env[63297]: DEBUG nova.compute.manager [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1530.200126] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.063s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.200875] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.202752] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.186s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.204496] env[63297]: DEBUG nova.objects.instance [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lazy-loading 'pci_requests' on Instance uuid fba9040d-f904-44a1-8785-14d4696ea939 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1530.232590] env[63297]: INFO nova.scheduler.client.report [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted allocations for instance efaa465d-f6b2-4891-8e96-b4c3af052759 [ 1530.254031] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697740, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.335598] env[63297]: DEBUG nova.network.neutron [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Successfully updated port: c5d88999-9081-4745-8997-a2c43582e237 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1530.336973] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 92439795-6240-4103-940b-de6d87738570] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1530.446346] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697743, 'name': CreateVM_Task, 'duration_secs': 0.423199} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.446573] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a868570-7504-4262-80b2-a458c219e689] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1530.447370] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.447542] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.447869] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1530.448160] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6b0ff7a-72f5-4213-a545-3333b272c6c3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.452965] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Waiting for the task: (returnval){ [ 1530.452965] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524d918d-e8fc-82cd-7014-aa285490cb24" [ 1530.452965] env[63297]: _type = "Task" [ 1530.452965] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.463856] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524d918d-e8fc-82cd-7014-aa285490cb24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.595763] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697739, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576656} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.596016] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 44f4776e-d4a1-40ad-a03b-bb03582b95bd/44f4776e-d4a1-40ad-a03b-bb03582b95bd.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1530.596255] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1530.596507] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a0152ed9-5d0d-4be1-aedf-1401beeb0bde {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.601337] env[63297]: DEBUG nova.network.neutron [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] [instance: 5a868570-7504-4262-80b2-a458c219e689] Updated VIF entry in instance network info cache for port 4edba2de-a7ec-4a5d-889c-b76110d2b060. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1530.601601] env[63297]: DEBUG nova.network.neutron [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] [instance: 5a868570-7504-4262-80b2-a458c219e689] Updating instance_info_cache with network_info: [{"id": "4edba2de-a7ec-4a5d-889c-b76110d2b060", "address": "fa:16:3e:11:23:0d", "network": {"id": "da840baf-2df6-4f03-809f-f567f712cf8a", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1784417458-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "865def46f5404d40b485bb482ed9a05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4edba2de-a7", "ovs_interfaceid": "4edba2de-a7ec-4a5d-889c-b76110d2b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1530.604480] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1530.604480] env[63297]: value = "task-1697744" [ 1530.604480] env[63297]: _type = "Task" [ 1530.604480] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.612709] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697744, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.706943] env[63297]: DEBUG nova.objects.instance [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lazy-loading 'numa_topology' on Instance uuid fba9040d-f904-44a1-8785-14d4696ea939 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1530.711586] env[63297]: DEBUG nova.compute.utils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1530.714673] env[63297]: DEBUG nova.compute.manager [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1530.714673] env[63297]: DEBUG nova.network.neutron [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1530.741731] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a43b752d-09fc-4d96-83ab-627db03fa167 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "efaa465d-f6b2-4891-8e96-b4c3af052759" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.274s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.754334] env[63297]: DEBUG oslo_vmware.api [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697740, 'name': PowerOnVM_Task, 'duration_secs': 0.619459} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.754596] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1530.754827] env[63297]: INFO nova.compute.manager [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Took 8.75 seconds to spawn the instance on the hypervisor. 
[ 1530.755014] env[63297]: DEBUG nova.compute.manager [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1530.755993] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e6102b-0ff4-44fb-bb7b-431c79003df6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.792969] env[63297]: DEBUG nova.policy [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f80bce1510594b8a95537f814f68b2bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45448893e7ee4b8d896d1bb3f3a9ecf1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1530.841660] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "refresh_cache-5cdb44c7-3dc1-4bce-8864-a1a40150e730" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.841892] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "refresh_cache-5cdb44c7-3dc1-4bce-8864-a1a40150e730" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.841967] env[63297]: DEBUG nova.network.neutron [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1530.843593] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: eebcad60-4b8a-4fa0-b846-b65972c4c69c] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1530.972432] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524d918d-e8fc-82cd-7014-aa285490cb24, 'name': SearchDatastore_Task, 'duration_secs': 0.01031} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.972834] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.973018] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1530.973269] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.973419] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.973597] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1530.973902] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6cf95f2f-467b-47e2-943d-29e02174a137 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.983833] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1530.984020] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1530.984823] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b0ef0b9-b371-41e0-ba94-17f076ded230 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.990187] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Waiting for the task: (returnval){ [ 1530.990187] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]521ce61d-9137-fc6a-df47-1329b94c341e" [ 1530.990187] env[63297]: _type = "Task" [ 1530.990187] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.997957] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521ce61d-9137-fc6a-df47-1329b94c341e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.093450] env[63297]: DEBUG nova.network.neutron [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Successfully created port: fb3d9f25-b216-49b6-90c4-a53298983b44 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1531.105097] env[63297]: DEBUG oslo_concurrency.lockutils [req-db2f40e0-cf18-45b8-9a71-a8a9a94be298 req-7abb888d-3fc0-4189-b8cd-d9110d324ade service nova] Releasing lock "refresh_cache-5a868570-7504-4262-80b2-a458c219e689" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.113871] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697744, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084941} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.114141] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1531.114907] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e64f7d-d367-42f5-b9be-847c515e0f6d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.137453] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 44f4776e-d4a1-40ad-a03b-bb03582b95bd/44f4776e-d4a1-40ad-a03b-bb03582b95bd.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1531.137726] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24f5918e-b681-4d70-a290-593be200031b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.157645] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1531.157645] env[63297]: value = "task-1697745" [ 1531.157645] env[63297]: _type = "Task" [ 1531.157645] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.169558] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697745, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.217024] env[63297]: DEBUG nova.compute.manager [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1531.217799] env[63297]: INFO nova.compute.claims [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1531.272996] env[63297]: INFO nova.compute.manager [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Took 58.67 seconds to build instance. 
[ 1531.351991] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 42ac7d4b-135c-4dd6-a3f5-3bdf38e9086a] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1531.402604] env[63297]: DEBUG nova.network.neutron [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1531.501565] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521ce61d-9137-fc6a-df47-1329b94c341e, 'name': SearchDatastore_Task, 'duration_secs': 0.009969} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.505497] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73cdc729-e9c2-432d-8cbb-d7f0435aaa7b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.517393] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Waiting for the task: (returnval){ [ 1531.517393] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5235cb92-acf4-011d-5565-6b6b82d4de63" [ 1531.517393] env[63297]: _type = "Task" [ 1531.517393] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.526329] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5235cb92-acf4-011d-5565-6b6b82d4de63, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.608477] env[63297]: DEBUG nova.network.neutron [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Updating instance_info_cache with network_info: [{"id": "c5d88999-9081-4745-8997-a2c43582e237", "address": "fa:16:3e:f2:59:f4", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5d88999-90", "ovs_interfaceid": "c5d88999-9081-4745-8997-a2c43582e237", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.672154] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697745, 'name': ReconfigVM_Task, 'duration_secs': 0.340005} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.673838] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 44f4776e-d4a1-40ad-a03b-bb03582b95bd/44f4776e-d4a1-40ad-a03b-bb03582b95bd.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1531.673838] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5fcb5ae7-31ec-48df-83b1-528dee1f0ac6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.681137] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1531.681137] env[63297]: value = "task-1697746" [ 1531.681137] env[63297]: _type = "Task" [ 1531.681137] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.686554] env[63297]: DEBUG nova.compute.manager [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Received event network-vif-plugged-c5d88999-9081-4745-8997-a2c43582e237 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1531.686762] env[63297]: DEBUG oslo_concurrency.lockutils [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] Acquiring lock "5cdb44c7-3dc1-4bce-8864-a1a40150e730-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.686969] env[63297]: DEBUG oslo_concurrency.lockutils [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] Lock "5cdb44c7-3dc1-4bce-8864-a1a40150e730-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.687221] env[63297]: DEBUG oslo_concurrency.lockutils [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] Lock "5cdb44c7-3dc1-4bce-8864-a1a40150e730-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.687393] env[63297]: DEBUG nova.compute.manager [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] No waiting events found dispatching network-vif-plugged-c5d88999-9081-4745-8997-a2c43582e237 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1531.687556] env[63297]: WARNING nova.compute.manager [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Received unexpected event network-vif-plugged-c5d88999-9081-4745-8997-a2c43582e237 for instance with vm_state building and task_state spawning. [ 1531.687812] env[63297]: DEBUG nova.compute.manager [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Received event network-changed-c5d88999-9081-4745-8997-a2c43582e237 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1531.688014] env[63297]: DEBUG nova.compute.manager [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Refreshing instance network info cache due to event network-changed-c5d88999-9081-4745-8997-a2c43582e237. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1531.688194] env[63297]: DEBUG oslo_concurrency.lockutils [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] Acquiring lock "refresh_cache-5cdb44c7-3dc1-4bce-8864-a1a40150e730" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.695825] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697746, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.775418] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2b346c6-5efa-4e1e-8a8c-62c0e5570071 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.185s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.862151] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 8c10c573-de56-4c72-959a-65bf53b805a5] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1532.028169] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5235cb92-acf4-011d-5565-6b6b82d4de63, 'name': SearchDatastore_Task, 'duration_secs': 0.01481} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.028454] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.028706] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5a868570-7504-4262-80b2-a458c219e689/5a868570-7504-4262-80b2-a458c219e689.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1532.028960] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46c53439-9fb8-4582-8bb8-9621ab0cd65b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.035770] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Waiting for the task: (returnval){ [ 1532.035770] env[63297]: value = "task-1697747" [ 1532.035770] env[63297]: _type = "Task" [ 1532.035770] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.044026] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697747, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.113315] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "refresh_cache-5cdb44c7-3dc1-4bce-8864-a1a40150e730" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.113315] env[63297]: DEBUG nova.compute.manager [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Instance network_info: |[{"id": "c5d88999-9081-4745-8997-a2c43582e237", "address": "fa:16:3e:f2:59:f4", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5d88999-90", "ovs_interfaceid": "c5d88999-9081-4745-8997-a2c43582e237", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1532.113315] env[63297]: DEBUG oslo_concurrency.lockutils [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] Acquired lock "refresh_cache-5cdb44c7-3dc1-4bce-8864-a1a40150e730" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.113559] env[63297]: DEBUG nova.network.neutron [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Refreshing network info cache for port c5d88999-9081-4745-8997-a2c43582e237 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1532.114798] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:59:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5d88999-9081-4745-8997-a2c43582e237', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1532.123182] env[63297]: DEBUG oslo.service.loopingcall [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.124184] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1532.124416] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4273ab60-e80a-4b3f-9f45-b63b05a76996 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.144835] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1532.144835] env[63297]: value = "task-1697748" [ 1532.144835] env[63297]: _type = "Task" [ 1532.144835] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.153311] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697748, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.163263] env[63297]: INFO nova.compute.manager [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Rescuing [ 1532.163473] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "refresh_cache-b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.163646] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "refresh_cache-b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.163869] env[63297]: DEBUG nova.network.neutron [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1532.190079] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697746, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.235559] env[63297]: DEBUG nova.compute.manager [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1532.260117] env[63297]: DEBUG nova.virt.hardware [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1532.260296] env[63297]: DEBUG nova.virt.hardware [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1532.260413] env[63297]: DEBUG nova.virt.hardware [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1532.260590] env[63297]: DEBUG nova.virt.hardware [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1532.260737] env[63297]: DEBUG nova.virt.hardware [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1532.260882] env[63297]: DEBUG nova.virt.hardware [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1532.261099] env[63297]: DEBUG nova.virt.hardware [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1532.261262] env[63297]: DEBUG nova.virt.hardware [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1532.261429] 
env[63297]: DEBUG nova.virt.hardware [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1532.261681] env[63297]: DEBUG nova.virt.hardware [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1532.261948] env[63297]: DEBUG nova.virt.hardware [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1532.262894] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b98deb9-e7db-4c61-8cc7-04239e421001 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.275932] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0cc753-5c49-4824-b0c5-7ddd7d028d58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.364089] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 86a0579f-211c-42bc-925a-e30aaca4e0f5] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1532.546452] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697747, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.626423] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917518ef-559e-4f92-89f8-fdc9f9e52d26 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.634712] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ed8f99-7c05-433d-a185-1ceeb84b6c8c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.670911] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc99574-b397-42e2-bf67-4c0622e7b298 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.683019] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697748, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.685974] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1712b1e3-6b19-4a13-a9f9-6fb8d26637c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.694281] env[63297]: DEBUG nova.network.neutron [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Successfully updated port: fb3d9f25-b216-49b6-90c4-a53298983b44 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1532.707477] env[63297]: DEBUG nova.compute.provider_tree [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1532.708835] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697746, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.868357] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: f3a579de-1f29-4b67-8dc8-07ea37267001] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1533.010653] env[63297]: DEBUG nova.network.neutron [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Updated VIF entry in instance network info cache for port c5d88999-9081-4745-8997-a2c43582e237. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1533.011107] env[63297]: DEBUG nova.network.neutron [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Updating instance_info_cache with network_info: [{"id": "c5d88999-9081-4745-8997-a2c43582e237", "address": "fa:16:3e:f2:59:f4", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5d88999-90", "ovs_interfaceid": "c5d88999-9081-4745-8997-a2c43582e237", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.055786] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697747, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535506} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.056073] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5a868570-7504-4262-80b2-a458c219e689/5a868570-7504-4262-80b2-a458c219e689.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1533.056433] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1533.056713] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e6cce1f-a177-4477-9de6-fc2003081f3a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.063922] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Waiting for the task: (returnval){ [ 1533.063922] env[63297]: value = "task-1697749" [ 1533.063922] env[63297]: _type = "Task" [ 1533.063922] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.075219] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697749, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.087065] env[63297]: DEBUG nova.network.neutron [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Updating instance_info_cache with network_info: [{"id": "b7115a0d-4014-408b-b05e-52f08768ec9e", "address": "fa:16:3e:70:79:be", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7115a0d-40", "ovs_interfaceid": "b7115a0d-4014-408b-b05e-52f08768ec9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.102951] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "020b06c5-44e2-4f74-a1dc-d7557db3537e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.103335] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "020b06c5-44e2-4f74-a1dc-d7557db3537e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.173544] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697748, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.191673] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697746, 'name': Rename_Task, 'duration_secs': 1.170867} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.191953] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1533.192221] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0228c037-8f5a-4167-9271-5609d70c6651 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.199404] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1533.199404] env[63297]: value = "task-1697750" [ 1533.199404] env[63297]: _type = "Task" [ 1533.199404] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.207981] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697750, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.211038] env[63297]: DEBUG nova.scheduler.client.report [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1533.214618] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "refresh_cache-57d93827-2a5a-4f12-a74b-147a1a934dd1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.214748] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "refresh_cache-57d93827-2a5a-4f12-a74b-147a1a934dd1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.214894] env[63297]: DEBUG nova.network.neutron [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1533.371147] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 
754e64ec-b6fa-49d8-9de6-ef38918378fd] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1533.514866] env[63297]: DEBUG oslo_concurrency.lockutils [req-f0de017b-4ecc-4ce8-8da3-954742d93a7e req-ffe5bd9f-d394-4578-971e-3aec619fe845 service nova] Releasing lock "refresh_cache-5cdb44c7-3dc1-4bce-8864-a1a40150e730" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.574535] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697749, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076758} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.574800] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1533.575619] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d007152b-bf5d-4cf0-84c6-57ae8a3f4122 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.597518] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 5a868570-7504-4262-80b2-a458c219e689/5a868570-7504-4262-80b2-a458c219e689.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1533.598078] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "refresh_cache-b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.600061] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00ec935c-fd0a-4051-8bbe-e8c84c16466f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.616331] env[63297]: DEBUG nova.compute.manager [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1533.624192] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Waiting for the task: (returnval){ [ 1533.624192] env[63297]: value = "task-1697751" [ 1533.624192] env[63297]: _type = "Task" [ 1533.624192] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.633014] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697751, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.673602] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697748, 'name': CreateVM_Task, 'duration_secs': 1.224878} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.673731] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1533.674376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.674549] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.674873] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1533.675149] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08c9171f-fea5-47a6-8278-0a40d566c30d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.680402] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1533.680402] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b504c8-d010-d663-36ca-116a735b9c4c" [ 1533.680402] env[63297]: _type = "Task" [ 1533.680402] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.688788] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b504c8-d010-d663-36ca-116a735b9c4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.708425] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697750, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.714299] env[63297]: DEBUG nova.compute.manager [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Received event network-vif-plugged-fb3d9f25-b216-49b6-90c4-a53298983b44 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1533.714423] env[63297]: DEBUG oslo_concurrency.lockutils [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] Acquiring lock "57d93827-2a5a-4f12-a74b-147a1a934dd1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.714777] env[63297]: DEBUG oslo_concurrency.lockutils [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] Lock "57d93827-2a5a-4f12-a74b-147a1a934dd1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.714777] env[63297]: DEBUG oslo_concurrency.lockutils [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] Lock "57d93827-2a5a-4f12-a74b-147a1a934dd1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.714958] env[63297]: DEBUG nova.compute.manager [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] No waiting events found dispatching network-vif-plugged-fb3d9f25-b216-49b6-90c4-a53298983b44 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1533.715142] env[63297]: WARNING nova.compute.manager [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Received unexpected event network-vif-plugged-fb3d9f25-b216-49b6-90c4-a53298983b44 for instance with vm_state building and task_state spawning. [ 1533.715321] env[63297]: DEBUG nova.compute.manager [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Received event network-changed-fb3d9f25-b216-49b6-90c4-a53298983b44 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1533.715473] env[63297]: DEBUG nova.compute.manager [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Refreshing instance network info cache due to event network-changed-fb3d9f25-b216-49b6-90c4-a53298983b44. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1533.716047] env[63297]: DEBUG oslo_concurrency.lockutils [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] Acquiring lock "refresh_cache-57d93827-2a5a-4f12-a74b-147a1a934dd1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.716595] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.514s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.720340] env[63297]: DEBUG oslo_concurrency.lockutils [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.641s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.720534] env[63297]: DEBUG oslo_concurrency.lockutils [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.722190] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.842s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.722410] env[63297]: DEBUG nova.objects.instance [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lazy-loading 'resources' on Instance uuid 4701073f-eeee-4f37-919a-4c53663ac15f {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1533.749263] env[63297]: INFO nova.scheduler.client.report [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Deleted allocations for instance b853b581-ea46-4455-8cdb-6ea2f31c22be [ 1533.776809] env[63297]: INFO nova.network.neutron [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updating port 1dd69c1e-7eee-4b1b-b4a7-421ab5477495 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1533.780424] env[63297]: DEBUG nova.network.neutron [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1533.875139] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 459d5a17-182b-4284-b464-57d342981031] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1534.027046] env[63297]: DEBUG nova.network.neutron [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Updating instance_info_cache with network_info: [{"id": "fb3d9f25-b216-49b6-90c4-a53298983b44", "address": "fa:16:3e:2f:cd:f5", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb3d9f25-b2", "ovs_interfaceid": "fb3d9f25-b216-49b6-90c4-a53298983b44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.137970] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697751, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.144477] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.147727] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1534.148035] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42e8ee85-cd77-44cc-a48b-f4889a9efffb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.155445] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1534.155445] env[63297]: value = "task-1697752" [ 1534.155445] env[63297]: _type = "Task" [ 1534.155445] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.163777] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697752, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.194122] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b504c8-d010-d663-36ca-116a735b9c4c, 'name': SearchDatastore_Task, 'duration_secs': 0.010853} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.194490] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.194739] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1534.194991] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.195226] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.195443] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1534.195717] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7063b7b1-3a68-421d-8438-b655b17f1b27 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.205789] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1534.205971] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1534.207159] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-881e307f-a5e6-4307-8870-365963857947 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.214977] env[63297]: DEBUG oslo_vmware.api [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697750, 'name': PowerOnVM_Task, 'duration_secs': 0.60925} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.216556] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1534.216556] env[63297]: INFO nova.compute.manager [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Took 9.46 seconds to spawn the instance on the hypervisor. [ 1534.216556] env[63297]: DEBUG nova.compute.manager [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1534.217471] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-debea5b5-614f-4bd7-9933-f427f330457a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.224024] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1534.224024] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526bb37b-7eb6-e029-f0b6-0d144398b608" [ 1534.224024] env[63297]: _type = "Task" [ 1534.224024] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.238863] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526bb37b-7eb6-e029-f0b6-0d144398b608, 'name': SearchDatastore_Task, 'duration_secs': 0.010416} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.239997] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52a5fd56-f9a7-4307-903a-cf9609f3dae0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.245222] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1534.245222] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52096de9-de5f-66e6-6caf-a26a20be2ca4" [ 1534.245222] env[63297]: _type = "Task" [ 1534.245222] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.256862] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52096de9-de5f-66e6-6caf-a26a20be2ca4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.259907] env[63297]: DEBUG oslo_concurrency.lockutils [None req-360d6c4a-f45d-4b59-af6a-7ea860dd3cf0 tempest-VolumesAdminNegativeTest-812749398 tempest-VolumesAdminNegativeTest-812749398-project-member] Lock "b853b581-ea46-4455-8cdb-6ea2f31c22be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.269s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.378373] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: eff06e8a-8341-4d5e-b6dd-a585be4a21ea] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1534.529622] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "refresh_cache-57d93827-2a5a-4f12-a74b-147a1a934dd1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.529933] env[63297]: DEBUG nova.compute.manager [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Instance network_info: |[{"id": "fb3d9f25-b216-49b6-90c4-a53298983b44", "address": "fa:16:3e:2f:cd:f5", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb3d9f25-b2", "ovs_interfaceid": "fb3d9f25-b216-49b6-90c4-a53298983b44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1534.530274] env[63297]: DEBUG oslo_concurrency.lockutils [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] Acquired lock "refresh_cache-57d93827-2a5a-4f12-a74b-147a1a934dd1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.530422] env[63297]: DEBUG nova.network.neutron [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Refreshing network info cache for port fb3d9f25-b216-49b6-90c4-a53298983b44 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1534.531594] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:cd:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb3d9f25-b216-49b6-90c4-a53298983b44', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1534.539527] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Creating folder: Project (45448893e7ee4b8d896d1bb3f3a9ecf1). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1534.542936] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8e1dbbe-32eb-4c20-a86e-d56868cac980 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.554130] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Created folder: Project (45448893e7ee4b8d896d1bb3f3a9ecf1) in parent group-v353718. [ 1534.554325] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Creating folder: Instances. Parent ref: group-v353940. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1534.554566] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-761261c0-0895-46eb-93f1-4e0df588fd2b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.565881] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Created folder: Instances in parent group-v353940. [ 1534.566139] env[63297]: DEBUG oslo.service.loopingcall [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1534.566336] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1534.566536] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfcff7f5-27d3-4e77-80b0-f274d48f703d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.588270] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1534.588270] env[63297]: value = "task-1697755" [ 1534.588270] env[63297]: _type = "Task" [ 1534.588270] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.595310] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697755, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.596872] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d46194-b3be-4dee-9e14-9c826d402319 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.604119] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3a1c19-7fb2-4243-bb0b-bccee5ff9209 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.638767] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3a1288-3d0e-4d3c-a897-0bbb8eda254f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.646712] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697751, 'name': ReconfigVM_Task, 'duration_secs': 0.650354} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.648915] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 5a868570-7504-4262-80b2-a458c219e689/5a868570-7504-4262-80b2-a458c219e689.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1534.649592] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8c34924-362f-47cd-b1d8-db8898dbaad4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.652088] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a2eaae-0546-4b19-a220-24dc91967a80 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.660892] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Waiting for the task: (returnval){ [ 1534.660892] env[63297]: value = "task-1697756" [ 1534.660892] env[63297]: _type = "Task" [ 1534.660892] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.672058] env[63297]: DEBUG nova.compute.provider_tree [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1534.673570] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697752, 'name': PowerOffVM_Task, 'duration_secs': 0.203866} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.677529] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1534.678223] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3085ca-48ad-423d-b8ae-cebe1fd2f346 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.686822] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697756, 'name': Rename_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.702102] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c485670-3740-46d6-a0f8-d7cb13608462 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.751227] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1534.751227] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32785907-0cab-4c19-8d5d-bdce3f59e1c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.760751] env[63297]: INFO nova.compute.manager [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Took 43.23 seconds to build instance. [ 1534.769242] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1534.769242] env[63297]: value = "task-1697757" [ 1534.769242] env[63297]: _type = "Task" [ 1534.769242] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.774904] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52096de9-de5f-66e6-6caf-a26a20be2ca4, 'name': SearchDatastore_Task, 'duration_secs': 0.011511} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.779788] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.780366] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5cdb44c7-3dc1-4bce-8864-a1a40150e730/5cdb44c7-3dc1-4bce-8864-a1a40150e730.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1534.781411] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4d9cf9b-c910-4a11-a217-657218c7d850 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.792867] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1534.792867] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1534.793092] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.793148] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.793321] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1534.793634] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 
tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1534.793634] env[63297]: value = "task-1697758" [ 1534.793634] env[63297]: _type = "Task" [ 1534.793634] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.793882] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-543cf619-54ad-499e-9b44-056489943b15 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.806601] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697758, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.808046] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1534.808220] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1534.808948] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-511a61a0-fed3-4ee4-8484-4b788d8bfa2c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.814897] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1534.814897] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5226feec-82b8-13b7-60e8-a299037d4d23" [ 1534.814897] env[63297]: _type = "Task" [ 1534.814897] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.823419] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5226feec-82b8-13b7-60e8-a299037d4d23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.882947] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 746742ac-8d7a-466b-8bc0-043cb5422111] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1535.100115] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697755, 'name': CreateVM_Task, 'duration_secs': 0.430642} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.100369] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1535.101080] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.101252] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.101587] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1535.101856] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cebb8ad1-f1b8-46c5-bc6c-e42bf893103f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.107371] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1535.107371] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5290571c-1286-f0f5-5046-cbfcfe7f8c2c" [ 1535.107371] env[63297]: _type = "Task" [ 1535.107371] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.119965] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5290571c-1286-f0f5-5046-cbfcfe7f8c2c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.180308] env[63297]: DEBUG nova.scheduler.client.report [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1535.188372] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697756, 'name': Rename_Task, 'duration_secs': 0.175571} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.188372] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1535.188372] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-493ffe1c-1090-4bfe-a4d4-53713e5bdffc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.195370] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Waiting for the task: (returnval){ [ 1535.195370] env[63297]: value = "task-1697759" [ 1535.195370] env[63297]: _type = "Task" [ 1535.195370] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.206066] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697759, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.264485] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5858fdb1-6d00-4618-8ba4-2a2e1b91c53f tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "44f4776e-d4a1-40ad-a03b-bb03582b95bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.742s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.307683] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697758, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502134} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.308158] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5cdb44c7-3dc1-4bce-8864-a1a40150e730/5cdb44c7-3dc1-4bce-8864-a1a40150e730.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1535.309343] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1535.309820] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76f44320-c7e9-4b48-a345-6bec86560479 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.320911] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1535.320911] env[63297]: value = "task-1697760" [ 1535.320911] env[63297]: _type = "Task" [ 1535.320911] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.334553] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697760, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.342374] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5226feec-82b8-13b7-60e8-a299037d4d23, 'name': SearchDatastore_Task, 'duration_secs': 0.00988} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.343761] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-133438ed-1caf-49ba-889c-e18286af23ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.347214] env[63297]: DEBUG nova.network.neutron [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Updated VIF entry in instance network info cache for port fb3d9f25-b216-49b6-90c4-a53298983b44. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1535.347721] env[63297]: DEBUG nova.network.neutron [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Updating instance_info_cache with network_info: [{"id": "fb3d9f25-b216-49b6-90c4-a53298983b44", "address": "fa:16:3e:2f:cd:f5", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb3d9f25-b2", "ovs_interfaceid": "fb3d9f25-b216-49b6-90c4-a53298983b44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1535.353039] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1535.353039] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5216546d-9c67-ed4e-f414-395bd05abad8" [ 1535.353039] env[63297]: _type = "Task" [ 1535.353039] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.363867] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5216546d-9c67-ed4e-f414-395bd05abad8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.387761] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 4438e230-0589-48ae-8848-d1f8414efa61] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1535.475498] env[63297]: DEBUG nova.compute.manager [req-a66f4f01-672d-4a96-9f2b-d9cdf72a622d req-e1e91bbf-6f5b-4bb5-bb01-4c2ebd8984d2 service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Received event network-vif-plugged-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1535.475980] env[63297]: DEBUG oslo_concurrency.lockutils [req-a66f4f01-672d-4a96-9f2b-d9cdf72a622d req-e1e91bbf-6f5b-4bb5-bb01-4c2ebd8984d2 service nova] Acquiring lock "fba9040d-f904-44a1-8785-14d4696ea939-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.476422] env[63297]: DEBUG oslo_concurrency.lockutils [req-a66f4f01-672d-4a96-9f2b-d9cdf72a622d req-e1e91bbf-6f5b-4bb5-bb01-4c2ebd8984d2 service nova] Lock "fba9040d-f904-44a1-8785-14d4696ea939-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.476699] env[63297]: DEBUG oslo_concurrency.lockutils [req-a66f4f01-672d-4a96-9f2b-d9cdf72a622d req-e1e91bbf-6f5b-4bb5-bb01-4c2ebd8984d2 service nova] Lock "fba9040d-f904-44a1-8785-14d4696ea939-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.476970] env[63297]: DEBUG nova.compute.manager [req-a66f4f01-672d-4a96-9f2b-d9cdf72a622d req-e1e91bbf-6f5b-4bb5-bb01-4c2ebd8984d2 service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] No waiting events found dispatching network-vif-plugged-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1535.477285] env[63297]: WARNING nova.compute.manager [req-a66f4f01-672d-4a96-9f2b-d9cdf72a622d req-e1e91bbf-6f5b-4bb5-bb01-4c2ebd8984d2 service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Received unexpected event network-vif-plugged-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 for instance with vm_state shelved_offloaded and task_state spawning. [ 1535.623582] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5290571c-1286-f0f5-5046-cbfcfe7f8c2c, 'name': SearchDatastore_Task, 'duration_secs': 0.055388} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.623582] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.623897] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.623897] env[63297]: DEBUG nova.network.neutron [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1535.626243] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.626243] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1535.626462] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.684212] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.962s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.687039] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.804s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.687529] env[63297]: DEBUG nova.objects.instance [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] 
Lazy-loading 'resources' on Instance uuid fc54a008-eb2e-4b10-86ea-be7c82b93139 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1535.716951] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697759, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.718710] env[63297]: INFO nova.scheduler.client.report [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Deleted allocations for instance 4701073f-eeee-4f37-919a-4c53663ac15f [ 1535.834238] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697760, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073668} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.834512] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1535.835375] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761fc9e9-7ebb-4c0e-b53b-53f7622b8297 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.850911] env[63297]: DEBUG nova.compute.manager [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1535.851686] env[63297]: DEBUG oslo_concurrency.lockutils [req-7521a323-bff5-414f-9e1a-3a437818121c req-427f0163-2d7c-4990-a53d-304d4c9d1100 service nova] Releasing lock "refresh_cache-57d93827-2a5a-4f12-a74b-147a1a934dd1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.860525] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 5cdb44c7-3dc1-4bce-8864-a1a40150e730/5cdb44c7-3dc1-4bce-8864-a1a40150e730.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1535.861264] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6827f8-d3b8-4f79-9e5e-d1155c9f3207 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.864071] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-921b8e24-245d-414d-bd4c-e909b0bf8eb8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.887372] 
env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5216546d-9c67-ed4e-f414-395bd05abad8, 'name': SearchDatastore_Task, 'duration_secs': 0.00959} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.889614] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.889894] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk. {{(pid=63297) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1535.890245] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1535.890245] env[63297]: value = "task-1697761" [ 1535.890245] env[63297]: _type = "Task" [ 1535.890245] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.892727] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.892927] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1535.893339] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 35c68986-51b5-43ba-a076-aca3c86d68bc] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1535.898026] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b593141c-ed7f-4810-a042-859592c0343a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.898026] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-849d49c2-74a4-4ad7-8fbe-6542aa1bc11b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.908660] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697761, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.910888] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1535.910888] env[63297]: value = "task-1697762" [ 1535.910888] env[63297]: _type = "Task" [ 1535.910888] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.911251] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1535.911344] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1535.912288] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-194ac977-21f7-4899-9796-042fef6f64b3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.924996] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1535.924996] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d383b1-e17d-c312-4219-8a5504c06d70" [ 1535.924996] env[63297]: _type = "Task" [ 1535.924996] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.925234] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697762, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.934529] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d383b1-e17d-c312-4219-8a5504c06d70, 'name': SearchDatastore_Task, 'duration_secs': 0.010047} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.936911] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc8e1136-c0f7-459a-bdcb-f84e3799eca7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.941788] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1535.941788] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b30720-b8ff-9b82-4fca-9a532c449774" [ 1535.941788] env[63297]: _type = "Task" [ 1535.941788] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.952210] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b30720-b8ff-9b82-4fca-9a532c449774, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.206530] env[63297]: DEBUG oslo_vmware.api [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697759, 'name': PowerOnVM_Task, 'duration_secs': 0.737898} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.206660] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1536.207051] env[63297]: INFO nova.compute.manager [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Took 8.91 seconds to spawn the instance on the hypervisor. [ 1536.207051] env[63297]: DEBUG nova.compute.manager [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1536.207926] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aaa51ad-ecd8-4ff8-becf-ef690543aa27 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.231566] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4749fb4e-3a0a-4689-b1ff-568480d0f282 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "4701073f-eeee-4f37-919a-4c53663ac15f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.210s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.398985] env[63297]: INFO nova.compute.manager [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] instance snapshotting [ 1536.400706] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 6d290634-67e7-4fb4-9a88-3da6eca34d4b] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1536.407332] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4d2632-0983-416f-8999-55231d9e9c18 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.433815] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697761, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.437650] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60d8f77-0319-4325-8119-0dcf20cb7cdb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.441195] env[63297]: DEBUG nova.network.neutron [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updating instance_info_cache with network_info: [{"id": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "address": "fa:16:3e:13:0a:af", "network": {"id": "77c68484-41bb-4cce-bb80-50e08cc5af11", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-795079396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf40bf7f33349cb8bb098887d1244ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd69c1e-7e", "ovs_interfaceid": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.446660] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697762, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502391} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.451061] env[63297]: INFO nova.virt.vmwareapi.ds_util [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk. [ 1536.454423] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b39b66-94e9-43b1-9ca7-4cc2149792d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.468859] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b30720-b8ff-9b82-4fca-9a532c449774, 'name': SearchDatastore_Task, 'duration_secs': 0.009511} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.482457] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.482746] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 57d93827-2a5a-4f12-a74b-147a1a934dd1/57d93827-2a5a-4f12-a74b-147a1a934dd1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1536.491282] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1536.494124] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64dfb4f2-d935-4467-a80e-3568fc19ce96 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.497507] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e75a3de4-051a-4d65-9458-bf98a0c03988 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.516996] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1536.516996] env[63297]: value = "task-1697763" [ 1536.516996] env[63297]: _type = "Task" [ 1536.516996] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.521839] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1536.521839] env[63297]: value = "task-1697764" [ 1536.521839] env[63297]: _type = "Task" [ 1536.521839] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.530474] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697763, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.538777] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697764, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.618854] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e87e6e-7299-41ac-8f29-44d5b734768f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.628475] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bb8ba6-1a23-4831-9695-da20e51ee240 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.665756] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074df949-b006-4280-a073-2b36b6403fe6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.674394] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c669557-03be-4a55-9677-8b3f9b87974b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.688608] env[63297]: DEBUG nova.compute.provider_tree [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1536.733384] env[63297]: INFO nova.compute.manager [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Took 44.99 seconds to build instance. [ 1536.913024] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697761, 'name': ReconfigVM_Task, 'duration_secs': 0.61648} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.913024] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 5cdb44c7-3dc1-4bce-8864-a1a40150e730/5cdb44c7-3dc1-4bce-8864-a1a40150e730.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1536.913024] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 13706c85-c23e-47cd-a7d8-2e902c11a7fb] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1536.913024] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f7ad215-df06-4c7c-b949-0e525829bda1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.928877] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1536.928877] env[63297]: value = "task-1697765" [ 1536.928877] env[63297]: _type = "Task" [ 1536.928877] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.945619] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697765, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.948468] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Releasing lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.965924] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1536.967980] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1bd5643f-7a95-40b9-9dae-ca1026e4da5a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.977168] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1536.977168] env[63297]: value = "task-1697766" [ 1536.977168] env[63297]: _type = "Task" [ 1536.977168] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.986690] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697766, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.995265] env[63297]: DEBUG nova.virt.hardware [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1ba85793895e34b8487e8e1e1549b4be',container_format='bare',created_at=2024-12-10T17:26:41Z,direct_url=,disk_format='vmdk',id=dd69ed84-813a-498d-a635-ba1ad9182458,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-2106355755-shelved',owner='baf40bf7f33349cb8bb098887d1244ac',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2024-12-10T17:26:56Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1536.995422] env[63297]: DEBUG nova.virt.hardware [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1536.995564] env[63297]: DEBUG nova.virt.hardware [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1536.995719] env[63297]: DEBUG nova.virt.hardware [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1536.995865] env[63297]: DEBUG nova.virt.hardware [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1536.996065] env[63297]: DEBUG nova.virt.hardware [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1536.996485] env[63297]: DEBUG nova.virt.hardware [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1536.996713] env[63297]: DEBUG nova.virt.hardware [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1536.996902] env[63297]: DEBUG nova.virt.hardware [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1536.997083] env[63297]: DEBUG nova.virt.hardware [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1536.997289] env[63297]: DEBUG nova.virt.hardware [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1536.998497] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c97862-e283-4f1e-9af2-a0c5c80f6dd3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.006229] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea42074-c118-402c-a956-75efee612a0a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.021758] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:0a:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1dd69c1e-7eee-4b1b-b4a7-421ab5477495', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1537.029683] env[63297]: DEBUG oslo.service.loopingcall [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1537.033032] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1537.036611] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c839cc7b-e130-4579-8bf4-46ae1975ba4c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.056752] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697763, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478776} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.060809] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 57d93827-2a5a-4f12-a74b-147a1a934dd1/57d93827-2a5a-4f12-a74b-147a1a934dd1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1537.061077] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1537.061368] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697764, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.061570] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1537.061570] env[63297]: value = "task-1697767" [ 1537.061570] env[63297]: _type = "Task" [ 1537.061570] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.061752] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b424683c-f243-40af-9741-10442fa0795e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.072788] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697767, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.074166] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1537.074166] env[63297]: value = "task-1697768" [ 1537.074166] env[63297]: _type = "Task" [ 1537.074166] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.082134] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697768, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.193343] env[63297]: DEBUG nova.scheduler.client.report [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1537.235150] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1c4ffd66-64f9-45ab-b5fa-15a3a55e9b1d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Lock "5a868570-7504-4262-80b2-a458c219e689" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.508s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.422692] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: b5d34058-fa3e-4806-97e5-638bbbffaeb8] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1537.437353] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697765, 'name': Rename_Task, 'duration_secs': 0.248804} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.437658] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1537.437912] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f4a60fff-763c-4495-b145-39aad912b139 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.445169] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1537.445169] env[63297]: value = "task-1697769" [ 1537.445169] env[63297]: _type = "Task" [ 1537.445169] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.454196] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697769, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.488880] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697766, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.543369] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697764, 'name': ReconfigVM_Task, 'duration_secs': 0.620502} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.543614] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Reconfigured VM instance instance-0000004d to attach disk [datastore1] b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1537.544556] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20a8711-2ced-43b9-9630-f79886dafafd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.578222] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4350025-0525-4c1f-906c-01c8cb0ab838 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.589520] env[63297]: DEBUG nova.compute.manager [req-d6853c8e-927c-46d0-9994-2d0e37af5407 req-80beef34-fd04-4800-8e3a-f055d7f3f95e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Received event network-changed-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1537.589708] env[63297]: DEBUG nova.compute.manager [req-d6853c8e-927c-46d0-9994-2d0e37af5407 req-80beef34-fd04-4800-8e3a-f055d7f3f95e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Refreshing instance network info cache due to event network-changed-1dd69c1e-7eee-4b1b-b4a7-421ab5477495. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1537.589970] env[63297]: DEBUG oslo_concurrency.lockutils [req-d6853c8e-927c-46d0-9994-2d0e37af5407 req-80beef34-fd04-4800-8e3a-f055d7f3f95e service nova] Acquiring lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.590185] env[63297]: DEBUG oslo_concurrency.lockutils [req-d6853c8e-927c-46d0-9994-2d0e37af5407 req-80beef34-fd04-4800-8e3a-f055d7f3f95e service nova] Acquired lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.590354] env[63297]: DEBUG nova.network.neutron [req-d6853c8e-927c-46d0-9994-2d0e37af5407 req-80beef34-fd04-4800-8e3a-f055d7f3f95e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Refreshing network info cache for port 1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1537.601737] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697767, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.605683] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697768, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073026} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.605987] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1537.605987] env[63297]: value = "task-1697770" [ 1537.605987] env[63297]: _type = "Task" [ 1537.605987] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.606748] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1537.608069] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641edd17-e3dd-4d5e-807a-c683d51a6bd5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.628420] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697770, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.637742] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 57d93827-2a5a-4f12-a74b-147a1a934dd1/57d93827-2a5a-4f12-a74b-147a1a934dd1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1537.638598] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a104f5c-7de8-478c-bcd0-c6dd07c122ba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.660073] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1537.660073] env[63297]: value = "task-1697771" [ 1537.660073] env[63297]: _type = "Task" [ 1537.660073] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.668776] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697771, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.698398] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.011s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.701935] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.441s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.701935] env[63297]: DEBUG nova.objects.instance [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lazy-loading 'resources' on Instance uuid 14b4518e-044a-451a-845d-fa3742e5b3e2 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1537.735697] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Acquiring lock "5a868570-7504-4262-80b2-a458c219e689" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.736034] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d 
tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Lock "5a868570-7504-4262-80b2-a458c219e689" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.736830] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Acquiring lock "5a868570-7504-4262-80b2-a458c219e689-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.736830] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Lock "5a868570-7504-4262-80b2-a458c219e689-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.736830] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Lock "5a868570-7504-4262-80b2-a458c219e689-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.739620] env[63297]: INFO nova.compute.manager [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Terminating instance [ 1537.742993] env[63297]: DEBUG nova.compute.manager [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1537.743278] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1537.744803] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab330b8-3652-4486-bef5-859a2d9484af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.755185] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1537.755270] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d41a40ec-7459-4b01-98bb-3c9028e73b81 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.759907] env[63297]: INFO nova.scheduler.client.report [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Deleted allocations for instance fc54a008-eb2e-4b10-86ea-be7c82b93139 [ 1537.765178] env[63297]: DEBUG oslo_vmware.api [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Waiting for the task: (returnval){ [ 1537.765178] env[63297]: value = "task-1697772" [ 1537.765178] env[63297]: _type = "Task" [ 1537.765178] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.773716] env[63297]: DEBUG oslo_vmware.api [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697772, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.928349] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: ef57101e-1d8a-4ad5-ad68-cad2dbea33d1] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1537.955637] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697769, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.989094] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697766, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.081464] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697767, 'name': CreateVM_Task, 'duration_secs': 0.829522} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.081696] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1538.082371] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.082542] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.082923] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1538.083196] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b6fdcee-cef8-474a-a7f4-96d1ac2f81ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.088728] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1538.088728] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bd6e73-09a8-41f8-6357-fb93b266fb45" [ 1538.088728] env[63297]: _type = "Task" [ 1538.088728] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.104599] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.104897] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Processing image dd69ed84-813a-498d-a635-ba1ad9182458 {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1538.105173] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458/dd69ed84-813a-498d-a635-ba1ad9182458.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.105333] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458/dd69ed84-813a-498d-a635-ba1ad9182458.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.105510] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1538.105786] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bfc34131-a7a6-4f42-8edd-ba695e6d64cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.115952] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697770, 'name': ReconfigVM_Task, 'duration_secs': 0.314915} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.115952] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1538.115952] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b38b838-8e44-4e75-8deb-5d4fdfdc1915 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.119691] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1538.119868] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1538.120584] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c59aec45-2978-449a-afe7-a3417e270b65 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.123981] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1538.123981] env[63297]: value = "task-1697773" [ 1538.123981] env[63297]: _type = "Task" [ 1538.123981] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.134797] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1538.134797] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52448c68-da30-c13f-a1df-51bee2411313" [ 1538.134797] env[63297]: _type = "Task" [ 1538.134797] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.139099] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697773, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.148470] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Preparing fetch location {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1538.148742] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Fetch image to [datastore1] OSTACK_IMG_81233771-c4a2-417b-a8c4-83de9ab18111/OSTACK_IMG_81233771-c4a2-417b-a8c4-83de9ab18111.vmdk {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1538.148922] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Downloading stream optimized image dd69ed84-813a-498d-a635-ba1ad9182458 to [datastore1] OSTACK_IMG_81233771-c4a2-417b-a8c4-83de9ab18111/OSTACK_IMG_81233771-c4a2-417b-a8c4-83de9ab18111.vmdk on the data store datastore1 as vApp {{(pid=63297) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1538.149113] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Downloading image file data dd69ed84-813a-498d-a635-ba1ad9182458 to the ESX as VM named 'OSTACK_IMG_81233771-c4a2-417b-a8c4-83de9ab18111' {{(pid=63297) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1538.171197] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697771, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.239051] env[63297]: DEBUG oslo_vmware.rw_handles [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1538.239051] env[63297]: value = "resgroup-9" [ 1538.239051] env[63297]: _type = "ResourcePool" [ 1538.239051] env[63297]: }. 
{{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1538.239783] env[63297]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b930c2f5-4c36-48ff-a28a-954753e41647 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.267559] env[63297]: DEBUG oslo_vmware.rw_handles [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lease: (returnval){ [ 1538.267559] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f4b238-7efd-298b-3ac7-7322d37b1ee8" [ 1538.267559] env[63297]: _type = "HttpNfcLease" [ 1538.267559] env[63297]: } obtained for vApp import into resource pool (val){ [ 1538.267559] env[63297]: value = "resgroup-9" [ 1538.267559] env[63297]: _type = "ResourcePool" [ 1538.267559] env[63297]: }. {{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1538.267810] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the lease: (returnval){ [ 1538.267810] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f4b238-7efd-298b-3ac7-7322d37b1ee8" [ 1538.267810] env[63297]: _type = "HttpNfcLease" [ 1538.267810] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1538.268991] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a9484972-320c-4c9e-aabe-71d4611f03f0 tempest-MultipleCreateTestJSON-280669152 tempest-MultipleCreateTestJSON-280669152-project-member] Lock "fc54a008-eb2e-4b10-86ea-be7c82b93139" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.340s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.290304] env[63297]: DEBUG oslo_vmware.api [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697772, 'name': PowerOffVM_Task, 'duration_secs': 0.218454} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.291908] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1538.292131] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1538.292397] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1538.292397] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f4b238-7efd-298b-3ac7-7322d37b1ee8" [ 1538.292397] env[63297]: _type = "HttpNfcLease" [ 1538.292397] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1538.295560] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16853e29-f0f4-407d-862f-4819f388f273 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.359698] env[63297]: DEBUG nova.network.neutron [req-d6853c8e-927c-46d0-9994-2d0e37af5407 req-80beef34-fd04-4800-8e3a-f055d7f3f95e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updated VIF entry in instance network info cache for port 1dd69c1e-7eee-4b1b-b4a7-421ab5477495. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1538.359698] env[63297]: DEBUG nova.network.neutron [req-d6853c8e-927c-46d0-9994-2d0e37af5407 req-80beef34-fd04-4800-8e3a-f055d7f3f95e service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updating instance_info_cache with network_info: [{"id": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "address": "fa:16:3e:13:0a:af", "network": {"id": "77c68484-41bb-4cce-bb80-50e08cc5af11", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-795079396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf40bf7f33349cb8bb098887d1244ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd69c1e-7e", "ovs_interfaceid": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.391650] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1538.391650] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1538.391650] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Deleting the datastore file [datastore1] 5a868570-7504-4262-80b2-a458c219e689 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1538.391650] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4a7e289-9778-48cd-90ab-7fa0f2ebc81b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.398953] env[63297]: DEBUG oslo_vmware.api [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Waiting for the task: (returnval){ [ 1538.398953] env[63297]: value = "task-1697776" [ 1538.398953] env[63297]: _type = "Task" [ 1538.398953] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.409639] env[63297]: DEBUG oslo_vmware.api [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697776, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.431720] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 961c3a87-7f53-4764-b8a4-40a408a30f90] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1538.460388] env[63297]: DEBUG oslo_vmware.api [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697769, 'name': PowerOnVM_Task, 'duration_secs': 0.903184} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.461259] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1538.461604] env[63297]: INFO nova.compute.manager [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Took 8.68 seconds to spawn the instance on the hypervisor. [ 1538.461953] env[63297]: DEBUG nova.compute.manager [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1538.465990] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb8f34d-f7b7-493b-be24-2c82de36858b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.490338] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697766, 'name': CreateSnapshot_Task, 'duration_secs': 1.192898} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.493935] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1538.494863] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02312ef5-b342-4cb0-be33-814d64b8af08 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.604016] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b8263e-3dfc-4f40-b9a4-54817f671273 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.611896] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22cb0645-ab0c-492f-8c11-faad33ed4078 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.645680] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d078df9d-29f5-49de-9fb3-789f80990dad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.656169] env[63297]: DEBUG oslo_vmware.api [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697773, 'name': PowerOnVM_Task, 'duration_secs': 0.460815} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.657477] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746f3d28-a57f-49ea-b667-211782a17af7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.661619] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1538.667511] env[63297]: DEBUG nova.compute.manager [None req-8e154def-a490-4a92-a1c2-eff2987c6c5a tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1538.668363] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86364b3c-0b15-4ee7-955c-af84b526e4ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.679978] env[63297]: DEBUG nova.compute.provider_tree [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1538.684469] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697771, 'name': ReconfigVM_Task, 'duration_secs': 0.628427} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.684989] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 57d93827-2a5a-4f12-a74b-147a1a934dd1/57d93827-2a5a-4f12-a74b-147a1a934dd1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1538.687014] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-165b46d4-a04e-4d80-b353-6ef6a3786a2f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.698017] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1538.698017] env[63297]: value = "task-1697777" [ 1538.698017] env[63297]: _type = "Task" [ 1538.698017] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.712212] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697777, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.787034] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1538.787034] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f4b238-7efd-298b-3ac7-7322d37b1ee8" [ 1538.787034] env[63297]: _type = "HttpNfcLease" [ 1538.787034] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1538.861957] env[63297]: DEBUG oslo_concurrency.lockutils [req-d6853c8e-927c-46d0-9994-2d0e37af5407 req-80beef34-fd04-4800-8e3a-f055d7f3f95e service nova] Releasing lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.912929] env[63297]: DEBUG oslo_vmware.api [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Task: {'id': task-1697776, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169738} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.912929] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1538.912929] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1538.912929] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1538.912929] env[63297]: INFO nova.compute.manager [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] [instance: 5a868570-7504-4262-80b2-a458c219e689] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1538.912929] env[63297]: DEBUG oslo.service.loopingcall [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1538.912929] env[63297]: DEBUG nova.compute.manager [-] [instance: 5a868570-7504-4262-80b2-a458c219e689] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1538.912929] env[63297]: DEBUG nova.network.neutron [-] [instance: 5a868570-7504-4262-80b2-a458c219e689] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1538.935663] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: ef851d71-788d-42f8-a824-5d30a89e957b] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1538.988685] env[63297]: INFO nova.compute.manager [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Took 41.24 seconds to build instance. [ 1539.026084] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1539.026084] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-98577872-bf76-4430-bf2c-916922737e91 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.036571] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1539.036571] env[63297]: value = "task-1697778" [ 1539.036571] env[63297]: _type = "Task" [ 1539.036571] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.047597] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697778, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.186428] env[63297]: DEBUG nova.scheduler.client.report [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1539.208465] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697777, 'name': Rename_Task, 'duration_secs': 0.171307} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.208766] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1539.210563] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd24f721-497a-469e-bf3f-0a734feec8d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.216822] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1539.216822] env[63297]: value = "task-1697779" [ 1539.216822] env[63297]: _type = "Task" [ 1539.216822] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.224821] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697779, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.284287] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1539.284287] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f4b238-7efd-298b-3ac7-7322d37b1ee8" [ 1539.284287] env[63297]: _type = "HttpNfcLease" [ 1539.284287] env[63297]: } is ready. 
{{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1539.284593] env[63297]: DEBUG oslo_vmware.rw_handles [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1539.284593] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f4b238-7efd-298b-3ac7-7322d37b1ee8" [ 1539.284593] env[63297]: _type = "HttpNfcLease" [ 1539.284593] env[63297]: }. {{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1539.285349] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a6a6b5-9fac-45b9-90bb-6e6a6ceacdae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.293838] env[63297]: DEBUG oslo_vmware.rw_handles [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a218bb-3471-06ab-1ada-4d941237b63e/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1539.293838] env[63297]: DEBUG oslo_vmware.rw_handles [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a218bb-3471-06ab-1ada-4d941237b63e/disk-0.vmdk. {{(pid=63297) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1539.363130] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-425f4999-7ff6-44b4-9b09-fa7e9411e353 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.439999] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 8adfd26f-1012-4e52-9371-e9d3f654046c] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1539.491326] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0c5349fc-a047-4e09-9364-7cf14741b562 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "5cdb44c7-3dc1-4bce-8864-a1a40150e730" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.754s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.549166] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697778, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.588854] env[63297]: DEBUG nova.compute.manager [req-127d0a81-fe34-4e90-b274-2268820a62da req-4089cd4a-efc2-4acd-b12e-3d8204827952 service nova] [instance: 5a868570-7504-4262-80b2-a458c219e689] Received event network-vif-deleted-4edba2de-a7ec-4a5d-889c-b76110d2b060 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1539.588854] env[63297]: INFO nova.compute.manager [req-127d0a81-fe34-4e90-b274-2268820a62da req-4089cd4a-efc2-4acd-b12e-3d8204827952 service nova] [instance: 5a868570-7504-4262-80b2-a458c219e689] Neutron deleted interface 4edba2de-a7ec-4a5d-889c-b76110d2b060; detaching it from the instance and deleting it from the info cache [ 1539.589529] env[63297]: DEBUG nova.network.neutron [req-127d0a81-fe34-4e90-b274-2268820a62da req-4089cd4a-efc2-4acd-b12e-3d8204827952 service nova] [instance: 5a868570-7504-4262-80b2-a458c219e689] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.692025] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.991s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.697525] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.983s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.697885] env[63297]: DEBUG nova.objects.instance [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lazy-loading 'resources' on Instance uuid 5914b3ce-f40f-4782-b56a-9fc29c819938 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1539.734358] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697779, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.751067] env[63297]: INFO nova.scheduler.client.report [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleted allocations for instance 14b4518e-044a-451a-845d-fa3742e5b3e2 [ 1539.759420] env[63297]: DEBUG nova.network.neutron [-] [instance: 5a868570-7504-4262-80b2-a458c219e689] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.943219] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 81920a24-f406-4923-98b7-cc0f3d0ccc8b] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1540.032803] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "5cdb44c7-3dc1-4bce-8864-a1a40150e730" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.033146] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "5cdb44c7-3dc1-4bce-8864-a1a40150e730" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.033381] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "5cdb44c7-3dc1-4bce-8864-a1a40150e730-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.033572] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "5cdb44c7-3dc1-4bce-8864-a1a40150e730-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.033793] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "5cdb44c7-3dc1-4bce-8864-a1a40150e730-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.036955] env[63297]: INFO nova.compute.manager [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Terminating instance [ 1540.046032] env[63297]: DEBUG nova.compute.manager [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 
tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1540.046032] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1540.046971] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7085e8-e54e-4872-9e13-f7a485d920a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.061360] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697778, 'name': CloneVM_Task} progress is 95%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.061360] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1540.061360] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b888f6f2-0be1-4093-b7c1-2aedc009e87d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.069067] env[63297]: DEBUG oslo_vmware.api [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1540.069067] env[63297]: value = "task-1697780" [ 1540.069067] env[63297]: _type = "Task" [ 1540.069067] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.079434] env[63297]: DEBUG oslo_vmware.api [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697780, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.093480] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-454f910d-6e7c-436d-abd3-6f1eb0f712a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.103460] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5525a96-6d6c-4ad3-abfe-c1dea1e43c66 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.139531] env[63297]: DEBUG nova.compute.manager [req-127d0a81-fe34-4e90-b274-2268820a62da req-4089cd4a-efc2-4acd-b12e-3d8204827952 service nova] [instance: 5a868570-7504-4262-80b2-a458c219e689] Detach interface failed, port_id=4edba2de-a7ec-4a5d-889c-b76110d2b060, reason: Instance 5a868570-7504-4262-80b2-a458c219e689 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1540.240507] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697779, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.255499] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f890e7e4-9d94-4ce5-b87b-aa87cc14ec39 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "14b4518e-044a-451a-845d-fa3742e5b3e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.466s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.268706] env[63297]: INFO nova.compute.manager [-] [instance: 5a868570-7504-4262-80b2-a458c219e689] Took 1.36 seconds to deallocate network for instance. [ 1540.450290] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 3d66ef2c-ac35-4eae-a205-6dd80ee564d1] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1540.553784] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697778, 'name': CloneVM_Task, 'duration_secs': 1.420809} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.560575] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Created linked-clone VM from snapshot [ 1540.562035] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a58147f-3149-44c3-a076-25ac166f63a1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.572394] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Uploading image dc932a9e-d404-4f38-8fb5-26f5f3f4b879 {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1540.605676] env[63297]: DEBUG oslo_vmware.api [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697780, 'name': PowerOffVM_Task, 'duration_secs': 0.260132} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.607892] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1540.608021] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1540.610512] env[63297]: DEBUG oslo_vmware.rw_handles [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1540.610512] env[63297]: value = "vm-353946" [ 1540.610512] env[63297]: _type = "VirtualMachine" [ 1540.610512] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1540.611069] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-059efb28-34dc-4c08-ae05-dcc26e94c924 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.613443] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1d7f637b-25e7-4502-85df-9fc7aba859b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.632201] env[63297]: DEBUG oslo_vmware.rw_handles [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lease: (returnval){ [ 1540.632201] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52696bb9-4667-a31a-3e6b-aad9a873e78c" [ 1540.632201] env[63297]: _type = "HttpNfcLease" [ 1540.632201] env[63297]: } obtained for exporting VM: (result){ [ 1540.632201] env[63297]: value = "vm-353946" [ 1540.632201] env[63297]: _type = "VirtualMachine" [ 1540.632201] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1540.634041] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the lease: (returnval){ [ 1540.634041] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52696bb9-4667-a31a-3e6b-aad9a873e78c" [ 1540.634041] env[63297]: _type = "HttpNfcLease" [ 1540.634041] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1540.647726] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1540.647726] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52696bb9-4667-a31a-3e6b-aad9a873e78c" [ 1540.647726] env[63297]: _type = "HttpNfcLease" [ 1540.647726] env[63297]: } is ready. 
{{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1540.648021] env[63297]: DEBUG oslo_vmware.rw_handles [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1540.648021] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52696bb9-4667-a31a-3e6b-aad9a873e78c" [ 1540.648021] env[63297]: _type = "HttpNfcLease" [ 1540.648021] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1540.648849] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e078d3f7-0b5c-428d-bb7e-793f39912956 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.663529] env[63297]: DEBUG oslo_vmware.rw_handles [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522ec85f-2d79-a2e2-eaa1-ab5a62d31056/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1540.664433] env[63297]: DEBUG oslo_vmware.rw_handles [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522ec85f-2d79-a2e2-eaa1-ab5a62d31056/disk-0.vmdk for reading. {{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1540.748773] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12895377-0f25-4201-8863-5dc957e86306 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.752675] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1540.752923] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1540.753148] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleting the datastore file [datastore1] 5cdb44c7-3dc1-4bce-8864-a1a40150e730 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1540.756692] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12eccf23-2650-4f7d-92f7-d193e6d52cd8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.766482] env[63297]: DEBUG oslo_vmware.api [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 
tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697779, 'name': PowerOnVM_Task, 'duration_secs': 1.076261} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.773029] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1540.773159] env[63297]: INFO nova.compute.manager [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Took 8.54 seconds to spawn the instance on the hypervisor. [ 1540.773344] env[63297]: DEBUG nova.compute.manager [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1540.774354] env[63297]: DEBUG oslo_vmware.api [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1540.774354] env[63297]: value = "task-1697783" [ 1540.774354] env[63297]: _type = "Task" [ 1540.774354] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.775009] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca22f53-df1a-4b12-a5c0-e9b01324c6fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.778880] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12da7fa9-330b-426a-9f3b-f981840653aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.786412] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.795986] env[63297]: DEBUG oslo_vmware.api [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697783, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.837640] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d90ee940-90c3-4e74-a0a0-d93f18aea72d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.840293] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7566378-2011-4078-a5c3-f8d4e27eacd6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.854539] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff975c24-e24c-46d6-b37f-a4dc4b5af2af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.871404] env[63297]: DEBUG nova.compute.provider_tree [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1540.882576] env[63297]: DEBUG oslo_vmware.rw_handles [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Completed reading data from the image iterator. {{(pid=63297) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1540.884928] env[63297]: DEBUG oslo_vmware.rw_handles [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a218bb-3471-06ab-1ada-4d941237b63e/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1540.884928] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6857094b-42ef-4253-b263-23e2cf9a0bbe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.894597] env[63297]: DEBUG oslo_vmware.rw_handles [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a218bb-3471-06ab-1ada-4d941237b63e/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1540.894844] env[63297]: DEBUG oslo_vmware.rw_handles [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a218bb-3471-06ab-1ada-4d941237b63e/disk-0.vmdk. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1540.895340] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-efc294d5-d776-46db-9f07-80d72943e476 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.955867] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: ef3346b1-ce09-4616-bdf4-200ea31efd01] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1541.124359] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "b14e8466-68ab-4705-a439-6db961a149b0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.124648] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "b14e8466-68ab-4705-a439-6db961a149b0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.124873] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "b14e8466-68ab-4705-a439-6db961a149b0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.125068] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "b14e8466-68ab-4705-a439-6db961a149b0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.125239] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "b14e8466-68ab-4705-a439-6db961a149b0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.127942] env[63297]: INFO nova.compute.manager [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Terminating instance [ 1541.129948] env[63297]: DEBUG nova.compute.manager [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1541.130201] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1541.131126] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f140f14-14ab-4b35-8246-cb83c25b0d77 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.140238] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1541.140545] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8164f08b-339c-434b-b528-1ddc1eabf332 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.148950] env[63297]: DEBUG oslo_vmware.api [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1541.148950] env[63297]: value = "task-1697784" [ 1541.148950] env[63297]: _type = "Task" [ 1541.148950] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.157786] env[63297]: DEBUG oslo_vmware.api [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697784, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.294724] env[63297]: DEBUG oslo_vmware.api [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697783, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188973} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.296022] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1541.296267] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1541.296448] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1541.296625] env[63297]: INFO nova.compute.manager [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1541.296862] env[63297]: DEBUG oslo.service.loopingcall [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1541.297251] env[63297]: DEBUG oslo_vmware.rw_handles [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a218bb-3471-06ab-1ada-4d941237b63e/disk-0.vmdk. 
{{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1541.297470] env[63297]: INFO nova.virt.vmwareapi.images [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Downloaded image file data dd69ed84-813a-498d-a635-ba1ad9182458 [ 1541.297723] env[63297]: DEBUG nova.compute.manager [-] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1541.297865] env[63297]: DEBUG nova.network.neutron [-] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1541.299988] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8260f93d-c2a8-42a3-877c-b1dfd152ace2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.319580] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48df3d19-fd8f-4ebd-802f-92f64b4a9e34 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.350083] env[63297]: INFO nova.compute.manager [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Took 39.99 seconds to build instance. [ 1541.373708] env[63297]: INFO nova.virt.vmwareapi.images [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] The imported VM was unregistered [ 1541.376735] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Caching image {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1541.376992] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Creating directory with path [datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458 {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1541.378078] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74332594-11ac-4b4d-bef8-4f28c45ac320 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.381455] env[63297]: DEBUG nova.scheduler.client.report [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1541.407709] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Created directory with path [datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458 {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1541.409656] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_81233771-c4a2-417b-a8c4-83de9ab18111/OSTACK_IMG_81233771-c4a2-417b-a8c4-83de9ab18111.vmdk to [datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458/dd69ed84-813a-498d-a635-ba1ad9182458.vmdk. {{(pid=63297) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1541.410250] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-cee58ff6-1224-4816-b664-e775b6678e0b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.420835] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1541.420835] env[63297]: value = "task-1697786" [ 1541.420835] env[63297]: _type = "Task" [ 1541.420835] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.437777] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697786, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.461745] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: cce038d4-dc9a-4fae-8348-1c2f674b79e3] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1541.665322] env[63297]: DEBUG oslo_vmware.api [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697784, 'name': PowerOffVM_Task, 'duration_secs': 0.300489} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.665322] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1541.665322] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1541.665800] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7aac5d7-518c-4e46-b7ad-36351b9b4818 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.779362] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1541.779745] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1541.780050] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleting the datastore file [datastore1] b14e8466-68ab-4705-a439-6db961a149b0 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1541.780469] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-940ab6ac-9eae-4515-b027-5ddf406b32e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.792613] env[63297]: DEBUG oslo_vmware.api [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for the task: (returnval){ [ 1541.792613] env[63297]: value = "task-1697788" [ 1541.792613] env[63297]: _type = "Task" [ 1541.792613] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.807766] env[63297]: DEBUG oslo_vmware.api [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697788, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.817996] env[63297]: DEBUG nova.compute.manager [req-63c2381e-9140-4270-bd6f-cab6b015b110 req-be376bb8-21c3-4510-a07f-a7f78848b6ab service nova] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Received event network-vif-deleted-c5d88999-9081-4745-8997-a2c43582e237 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1541.818255] env[63297]: INFO nova.compute.manager [req-63c2381e-9140-4270-bd6f-cab6b015b110 req-be376bb8-21c3-4510-a07f-a7f78848b6ab service nova] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Neutron deleted interface c5d88999-9081-4745-8997-a2c43582e237; detaching it from the instance and deleting it from the info cache [ 1541.818502] env[63297]: DEBUG nova.network.neutron [req-63c2381e-9140-4270-bd6f-cab6b015b110 req-be376bb8-21c3-4510-a07f-a7f78848b6ab service nova] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.855548] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94e93215-ba48-474b-9618-884056e4c219 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "57d93827-2a5a-4f12-a74b-147a1a934dd1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 41.507s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.888999] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.191s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.893705] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.959s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.895407] env[63297]: INFO nova.compute.claims [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1541.924479] env[63297]: INFO nova.scheduler.client.report [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Deleted allocations for instance 5914b3ce-f40f-4782-b56a-9fc29c819938 [ 1541.943949] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697786, 'name': MoveVirtualDisk_Task} progress is 18%.
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.967994] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 7f8849fb-c5d6-47a1-8079-08dfb2e0b85a] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1542.201081] env[63297]: DEBUG nova.network.neutron [-] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1542.308972] env[63297]: DEBUG oslo_vmware.api [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.321874] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83940adc-6403-4402-93de-c295b03ab780 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.334246] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2501d70-e028-4cc1-8146-c5a2623fd913 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.373086] env[63297]: DEBUG nova.compute.manager [req-63c2381e-9140-4270-bd6f-cab6b015b110 req-be376bb8-21c3-4510-a07f-a7f78848b6ab service nova] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Detach interface failed, port_id=c5d88999-9081-4745-8997-a2c43582e237, reason: Instance 5cdb44c7-3dc1-4bce-8864-a1a40150e730 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1542.381722] env[63297]: INFO nova.compute.manager [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Rescuing [ 1542.382352] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "refresh_cache-42d872d6-da12-474b-8741-1d991d507cfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.382621] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "refresh_cache-42d872d6-da12-474b-8741-1d991d507cfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.382837] env[63297]: DEBUG nova.network.neutron [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1542.446213] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697786, 'name': MoveVirtualDisk_Task} progress is 38%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.446213] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6babec85-024e-49f8-8cf8-c7da5a92bfd0 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "5914b3ce-f40f-4782-b56a-9fc29c819938" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 18.715s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.471990] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: e5f198e8-2080-4e3e-8ad5-964b855d70ff] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1542.493629] env[63297]: INFO nova.compute.manager [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Rebuilding instance [ 1542.549981] env[63297]: DEBUG nova.compute.manager [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1542.551190] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c296a223-7463-4061-854b-ab4a911603e0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.704989] env[63297]: INFO nova.compute.manager [-] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Took 1.41 seconds to deallocate network for instance.
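The records above trace oslo.vmware's task-polling pattern: the driver invokes a privileged call (FileManager.MakeDirectory, then VirtualDiskManager.MoveVirtualDisk_Task), receives a Task managed object back, and blocks in wait_for_task() while _poll_task emits the "progress is N%" records until the task completes. A minimal sketch of that pattern follows; the vCenter endpoint, credentials, datastore paths and datacenter moref are illustrative placeholders, not values taken from this log.

    # Illustrative sketch of the oslo.vmware task-polling pattern; all literals are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # host, username, password, API retry count, task poll interval (placeholders)
    session = vmware_api.VMwareAPISession('vc.example.test', 'user', 'secret', 10, 0.5)

    vim = session.vim
    disk_mgr = vim.service_content.virtualDiskManager
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder datacenter moref

    # The call returns a Task reference immediately; the disk move runs server-side.
    task = session.invoke_api(
        vim, 'MoveVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] OSTACK_IMG_example/OSTACK_IMG_example.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore1] devstack-image-cache_base/example.vmdk',
        destDatacenter=dc_ref)

    # Blocks while polling the task (the "progress is N%" records) until it succeeds or raises.
    session.wait_for_task(task)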
[ 1542.781511] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "89c9cd40-585e-4ae6-88b3-1a33a94c3b52" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.781863] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "89c9cd40-585e-4ae6-88b3-1a33a94c3b52" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.782627] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "89c9cd40-585e-4ae6-88b3-1a33a94c3b52-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.782627] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "89c9cd40-585e-4ae6-88b3-1a33a94c3b52-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.782627] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "89c9cd40-585e-4ae6-88b3-1a33a94c3b52-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.784893] env[63297]: INFO nova.compute.manager [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Terminating instance [ 1542.787595] env[63297]: DEBUG nova.compute.manager [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Start destroying the instance on the hypervisor.
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1542.787911] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1542.788904] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285e161c-e441-40cb-9eb4-5e502d95a1b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.801635] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1542.805785] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a018090-8d24-40f6-a533-d59b1534d146 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.807871] env[63297]: DEBUG oslo_vmware.api [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.814347] env[63297]: DEBUG oslo_vmware.api [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1542.814347] env[63297]: value = "task-1697789" [ 1542.814347] env[63297]: _type = "Task" [ 1542.814347] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.824988] env[63297]: DEBUG oslo_vmware.api [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697789, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.940580] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697786, 'name': MoveVirtualDisk_Task} progress is 57%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.975654] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1542.975997] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Cleaning up deleted instances with incomplete migration {{(pid=63297) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1543.065983] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1543.072589] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-415a570f-649b-460a-a32a-e89167ed5c8b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.083020] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1543.083020] env[63297]: value = "task-1697790" [ 1543.083020] env[63297]: _type = "Task" [ 1543.083020] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.091404] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697790, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.213148] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.224873] env[63297]: DEBUG nova.network.neutron [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Updating instance_info_cache with network_info: [{"id": "6875a0ac-55bd-4388-9c3d-5105d4cf1c7a", "address": "fa:16:3e:10:ae:3e", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6875a0ac-55", "ovs_interfaceid": "6875a0ac-55bd-4388-9c3d-5105d4cf1c7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.257489] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9a75c7-8e04-4375-8744-1a8eabe91633 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.271348] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88374649-3cb2-4412-a2ef-106e183a39ba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.314325] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4ebb9b-7353-41d0-a5c6-b9532a208aa3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.325803] env[63297]: DEBUG oslo_vmware.api [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697788, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.331641] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f33d15d-9850-4274-99d3-0f0f0ada6583 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.336106] env[63297]: DEBUG oslo_vmware.api [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697789, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.348436] env[63297]: DEBUG nova.compute.provider_tree [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1543.442775] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697786, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.478674] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1543.596276] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697790, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.732083] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "refresh_cache-42d872d6-da12-474b-8741-1d991d507cfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1543.823678] env[63297]: DEBUG oslo_vmware.api [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.832448] env[63297]: DEBUG oslo_vmware.api [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697789, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.853900] env[63297]: DEBUG nova.scheduler.client.report [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1543.944842] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697786, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.093914] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697790, 'name': PowerOffVM_Task, 'duration_secs': 0.583145} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.094361] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1544.095301] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1544.096108] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7c6b89-9694-4b4f-baed-33bb9904bee4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.103499] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1544.103649] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e7fb533-4bd3-4f26-b26e-f5268406a3d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.201983] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Unregistered the VM {{(pid=63297) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1544.202303] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1544.202497] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleting the datastore file [datastore1] 57d93827-2a5a-4f12-a74b-147a1a934dd1 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1544.202823] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a220dadc-5d43-4ac7-aba2-0e496b7a3d14 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.209846] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1544.209846] env[63297]: value = "task-1697792" [ 1544.209846] env[63297]: _type = "Task" [ 1544.209846] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.218723] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697792, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.283147] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1544.283481] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88e62722-9e7d-4a86-a2e7-2c573204871d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.291454] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1544.291454] env[63297]: value = "task-1697793" [ 1544.291454] env[63297]: _type = "Task" [ 1544.291454] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.301017] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697793, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.316492] env[63297]: DEBUG oslo_vmware.api [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Task: {'id': task-1697788, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.068038} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.320328] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1544.320665] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1544.320887] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1544.321352] env[63297]: INFO nova.compute.manager [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Took 3.19 seconds to destroy the instance on the hypervisor. [ 1544.322043] env[63297]: DEBUG oslo.service.loopingcall [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1544.322043] env[63297]: DEBUG nova.compute.manager [-] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1544.322043] env[63297]: DEBUG nova.network.neutron [-] [instance: b14e8466-68ab-4705-a439-6db961a149b0] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1544.328986] env[63297]: DEBUG oslo_vmware.api [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697789, 'name': PowerOffVM_Task, 'duration_secs': 1.345682} completed successfully.
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.329309] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1544.329549] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1544.329887] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-478dac06-2555-47d0-a723-82d95683fe4c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.360336] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.361037] env[63297]: DEBUG nova.compute.manager [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1544.364234] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.740s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.364348] env[63297]: DEBUG nova.objects.instance [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lazy-loading 'resources' on Instance uuid 9b1306f9-4b0a-4116-8e79-271478f33490 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1544.424711] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1544.424945] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1544.425160] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Deleting the datastore file [datastore1] 89c9cd40-585e-4ae6-88b3-1a33a94c3b52 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1544.425494] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94cc4d55-36ac-4448-8b20-14cd89043c88 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.436669] env[63297]: DEBUG oslo_vmware.api [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for the task: (returnval){ [ 1544.436669] env[63297]: value = "task-1697795" [ 1544.436669] env[63297]: _type = "Task" [ 1544.436669] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.444089] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697786, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.777545} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.447396] env[63297]: INFO nova.virt.vmwareapi.ds_util [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_81233771-c4a2-417b-a8c4-83de9ab18111/OSTACK_IMG_81233771-c4a2-417b-a8c4-83de9ab18111.vmdk to [datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458/dd69ed84-813a-498d-a635-ba1ad9182458.vmdk. [ 1544.447696] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Cleaning up location [datastore1] OSTACK_IMG_81233771-c4a2-417b-a8c4-83de9ab18111 {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1544.447830] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_81233771-c4a2-417b-a8c4-83de9ab18111 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1544.451497] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f76734a8-4ee7-498c-90d1-d3e178c6ee4e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.455259] env[63297]: DEBUG oslo_vmware.api [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697795, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.462607] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1544.462607] env[63297]: value = "task-1697796" [ 1544.462607] env[63297]: _type = "Task" [ 1544.462607] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.474433] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697796, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.722975] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697792, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.395528} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.722975] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1544.723346] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1544.723677] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1544.792108] env[63297]: DEBUG nova.compute.manager [req-d60528b8-6be1-4cf5-9e64-8901dfedc7b1 req-4fde8c59-b3d2-4dd9-973f-3f08213be304 service nova] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Received event network-vif-deleted-603a207a-5c56-4835-a1be-961da01f6f07 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1544.792338] env[63297]: INFO nova.compute.manager [req-d60528b8-6be1-4cf5-9e64-8901dfedc7b1 req-4fde8c59-b3d2-4dd9-973f-3f08213be304 service nova] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Neutron deleted interface 603a207a-5c56-4835-a1be-961da01f6f07; detaching it from the instance and deleting it from the info cache [ 1544.792512] env[63297]: DEBUG nova.network.neutron [req-d60528b8-6be1-4cf5-9e64-8901dfedc7b1 req-4fde8c59-b3d2-4dd9-973f-3f08213be304 service nova] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.809086] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697793, 'name': PowerOffVM_Task, 'duration_secs': 0.322178} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.810343] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1544.810343] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5803c3f-c560-4ab3-89b1-08d3549f4260 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.832176] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de8ac52-cf03-4b6e-aa53-eb1be7b4c3cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.869360] env[63297]: DEBUG nova.compute.utils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1544.875957] env[63297]: DEBUG nova.compute.manager [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1544.877059] env[63297]: DEBUG nova.network.neutron [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1544.903933] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1544.904423] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c852db4-d9a0-411d-b48c-580c687e2a0e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.917054] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1544.917054] env[63297]: value = "task-1697797" [ 1544.917054] env[63297]: _type = "Task" [ 1544.917054] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.930947] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1544.930947] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1544.930947] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.931101] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.931438] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1544.931725] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-776297e2-c07f-4a66-9c0a-506cd9e688a5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.943828] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1544.944609] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1544.945621] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-483dc157-c641-473e-9c82-46803d6517ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.954283] env[63297]: DEBUG oslo_vmware.api [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Task: {'id': task-1697795, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.336049} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.955659] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1544.955659] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1544.955659] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1544.955929] env[63297]: INFO nova.compute.manager [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Took 2.17 seconds to destroy the instance on the hypervisor. [ 1544.955994] env[63297]: DEBUG oslo.service.loopingcall [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1544.956210] env[63297]: DEBUG nova.compute.manager [-] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1544.957323] env[63297]: DEBUG nova.network.neutron [-] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1544.960766] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1544.960766] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ae6569-ffc0-9950-05d8-0e7415a90fb2" [ 1544.960766] env[63297]: _type = "Task" [ 1544.960766] env[63297]: } to complete.
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.968045] env[63297]: DEBUG nova.policy [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a20e0fe0bec4d2d92a217ac49722793', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '731a719bb2a44a53985d10e02f9397cb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1544.981089] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ae6569-ffc0-9950-05d8-0e7415a90fb2, 'name': SearchDatastore_Task, 'duration_secs': 0.019339} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.984681] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697796, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076908} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.984910] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1d763ce-f435-40f1-b46c-68a58e3e2713 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.987350] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1544.987566] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458/dd69ed84-813a-498d-a635-ba1ad9182458.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1544.987823] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458/dd69ed84-813a-498d-a635-ba1ad9182458.vmdk to [datastore1] fba9040d-f904-44a1-8785-14d4696ea939/fba9040d-f904-44a1-8785-14d4696ea939.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1544.988643] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f1e15eb-56ae-4767-8780-66ccb8541940 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.996790] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1544.996790] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c553fa-47b5-8156-8989-f42dbe6942cb" [ 1544.996790] env[63297]: _type = "Task" [ 1544.996790] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.003415] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1545.003415] env[63297]: value = "task-1697798" [ 1545.003415] env[63297]: _type = "Task" [ 1545.003415] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.011044] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c553fa-47b5-8156-8989-f42dbe6942cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.020053] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697798, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.161135] env[63297]: DEBUG nova.network.neutron [-] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.301975] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33bd9178-506d-4bc6-8fe6-729abcd5128c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.306178] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df4dd23-1ee2-46e0-9eb5-bbda94e8a9ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.316177] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e702335-45bd-4f0c-898b-f8c27855e63c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.331442] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512bcd5d-c3e4-4562-bdd3-9378cbc0c6cb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.382410] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a52e644-5e39-4fe4-8aab-9f154abf26a8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.385744] env[63297]: DEBUG nova.compute.manager [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1545.388946] env[63297]: DEBUG nova.compute.manager [req-d60528b8-6be1-4cf5-9e64-8901dfedc7b1 req-4fde8c59-b3d2-4dd9-973f-3f08213be304 service nova] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Detach interface failed, port_id=603a207a-5c56-4835-a1be-961da01f6f07, reason: Instance b14e8466-68ab-4705-a439-6db961a149b0 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1545.389993] env[63297]: DEBUG nova.network.neutron [-] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.397119] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6132e226-e080-47fa-a96f-32d05b699e6c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.415951] env[63297]: DEBUG nova.compute.provider_tree [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1545.468708] env[63297]: DEBUG nova.network.neutron [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Successfully created port: 287bdace-3df3-414a-8adb-2f8d720f8528 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1545.517021] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c553fa-47b5-8156-8989-f42dbe6942cb, 'name': SearchDatastore_Task, 'duration_secs': 0.019118} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.517753] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1545.518127] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 42d872d6-da12-474b-8741-1d991d507cfa/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk. {{(pid=63297) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1545.518430] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697798, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.518648] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e55f8f8e-ba18-4cdf-95cf-84a5c328f3ca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.528287] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1545.528287] env[63297]: value = "task-1697799" [ 1545.528287] env[63297]: _type = "Task" [ 1545.528287] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.540220] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697799, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.668054] env[63297]: INFO nova.compute.manager [-] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Took 1.35 seconds to deallocate network for instance. [ 1545.774492] env[63297]: DEBUG nova.virt.hardware [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1545.776894] env[63297]: DEBUG nova.virt.hardware [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1545.776894] env[63297]: DEBUG nova.virt.hardware [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1545.776894] env[63297]: DEBUG nova.virt.hardware [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1545.776894] env[63297]: DEBUG nova.virt.hardware [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 
tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1545.776894] env[63297]: DEBUG nova.virt.hardware [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1545.776894] env[63297]: DEBUG nova.virt.hardware [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1545.776894] env[63297]: DEBUG nova.virt.hardware [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1545.776894] env[63297]: DEBUG nova.virt.hardware [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1545.776894] env[63297]: DEBUG nova.virt.hardware [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1545.776894] env[63297]: DEBUG nova.virt.hardware [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1545.777963] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2211cef8-d2fb-4670-ab83-d351f2502096 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.788532] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfaca687-abf7-40c9-926c-af58602f5c27 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.806544] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:cd:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb3d9f25-b216-49b6-90c4-a53298983b44', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1545.816779] 
env[63297]: DEBUG oslo.service.loopingcall [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1545.817172] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1545.817442] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-503675f2-0038-4cf0-a47e-8b3580cafc3a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.840846] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1545.840846] env[63297]: value = "task-1697800" [ 1545.840846] env[63297]: _type = "Task" [ 1545.840846] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.853156] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697800, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.896295] env[63297]: INFO nova.compute.manager [-] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Took 0.94 seconds to deallocate network for instance. [ 1545.921026] env[63297]: DEBUG nova.scheduler.client.report [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1546.016955] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697798, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.039258] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697799, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.175234] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.354848] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697800, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.403391] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.404523] env[63297]: DEBUG nova.compute.manager [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1546.430434] env[63297]: DEBUG nova.virt.hardware [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1546.431293] env[63297]: DEBUG nova.virt.hardware [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1546.431498] env[63297]: DEBUG nova.virt.hardware [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1546.431699] env[63297]: DEBUG nova.virt.hardware [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1546.431847] env[63297]: DEBUG nova.virt.hardware 
[None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1546.431995] env[63297]: DEBUG nova.virt.hardware [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1546.432260] env[63297]: DEBUG nova.virt.hardware [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1546.432428] env[63297]: DEBUG nova.virt.hardware [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1546.432600] env[63297]: DEBUG nova.virt.hardware [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1546.432804] env[63297]: DEBUG nova.virt.hardware [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1546.432930] env[63297]: DEBUG nova.virt.hardware [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1546.433755] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.070s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.436427] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8570d9c6-74da-441e-b77d-96c336b42648 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.439557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.295s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.441126] env[63297]: INFO nova.compute.claims [None 
req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1546.452457] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8edd17d-742e-4d4c-a226-17331ee958d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.460854] env[63297]: INFO nova.scheduler.client.report [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Deleted allocations for instance 9b1306f9-4b0a-4116-8e79-271478f33490 [ 1546.517087] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697798, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.543598] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697799, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.822719] env[63297]: DEBUG nova.compute.manager [req-ba89c427-7d8a-40d4-b792-5793c28b2b40 req-3b7fa456-de4d-41cf-b9d0-f6d62bfbd1ce service nova] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Received event network-vif-deleted-3e1fce64-30c4-4df1-b40f-3c1c3c717df4 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1546.853547] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697800, 'name': CreateVM_Task, 'duration_secs': 0.559288} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.853603] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1546.854368] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.854570] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.855053] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1546.855765] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-859f1184-942e-425d-a038-b43e9c576fc3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.861521] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1546.861521] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5285f804-5f7c-536c-5c24-d1925ebff5c1" [ 1546.861521] env[63297]: _type = "Task" [ 1546.861521] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.873403] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5285f804-5f7c-536c-5c24-d1925ebff5c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.982252] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13226137-300b-4c26-80c9-d67c89dcd506 tempest-MigrationsAdminTest-697190469 tempest-MigrationsAdminTest-697190469-project-member] Lock "9b1306f9-4b0a-4116-8e79-271478f33490" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.342s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.019068] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697798, 'name': CopyVirtualDisk_Task} progress is 71%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.042874] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697799, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.367147] env[63297]: DEBUG nova.network.neutron [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Successfully updated port: 287bdace-3df3-414a-8adb-2f8d720f8528 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1547.376867] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5285f804-5f7c-536c-5c24-d1925ebff5c1, 'name': SearchDatastore_Task, 'duration_secs': 0.096769} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.377687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.377687] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1547.377687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.377877] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.381022] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1547.381022] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35c73562-e1b9-4c91-82e1-e6c6d8a48805 
{{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.400454] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1547.400661] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1547.401528] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2f5ff10-42ea-4e95-b8d3-4eb4869c8a2a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.409924] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1547.409924] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52599a34-8799-1c42-49ef-6600c0833dc7" [ 1547.409924] env[63297]: _type = "Task" [ 1547.409924] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.422149] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52599a34-8799-1c42-49ef-6600c0833dc7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.527498] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697798, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.543160] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697799, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.743834] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6f8a2b-8758-446f-a7ba-f46b15e7d6ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.752283] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfef2b9c-9b02-43c0-b387-6fb5719ed23a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.787239] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0137b2-d3d8-4688-b5c9-51eb0c814e80 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.795584] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03424f7a-f94d-47f4-85cd-95029fcca31e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.810167] env[63297]: DEBUG nova.compute.provider_tree [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1547.869899] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "refresh_cache-0b7b9f1b-d277-4219-92fb-e35a8b867e77" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.870065] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquired lock "refresh_cache-0b7b9f1b-d277-4219-92fb-e35a8b867e77" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.870224] env[63297]: DEBUG nova.network.neutron [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1547.923207] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52599a34-8799-1c42-49ef-6600c0833dc7, 'name': SearchDatastore_Task, 'duration_secs': 0.093587} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.924076] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54ab9114-e859-4f16-b242-e7890882c662 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.931748] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1547.931748] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5253ecdf-43a2-97ab-7517-89497eead138" [ 1547.931748] env[63297]: _type = "Task" [ 1547.931748] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.942151] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5253ecdf-43a2-97ab-7517-89497eead138, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.017349] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697798, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.772984} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.017664] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/dd69ed84-813a-498d-a635-ba1ad9182458/dd69ed84-813a-498d-a635-ba1ad9182458.vmdk to [datastore1] fba9040d-f904-44a1-8785-14d4696ea939/fba9040d-f904-44a1-8785-14d4696ea939.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1548.018441] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2b55dc-d72d-4c2a-9734-e5f60a291827 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.042300] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] fba9040d-f904-44a1-8785-14d4696ea939/fba9040d-f904-44a1-8785-14d4696ea939.vmdk or device None with type streamOptimized {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1548.046184] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc7fbac9-9361-473c-863a-8d42f7d92115 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.066608] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 
tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697799, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.446804} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.067922] env[63297]: INFO nova.virt.vmwareapi.ds_util [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 42d872d6-da12-474b-8741-1d991d507cfa/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk. [ 1548.068287] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1548.068287] env[63297]: value = "task-1697801" [ 1548.068287] env[63297]: _type = "Task" [ 1548.068287] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.069034] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c241f109-a029-4b7a-a80c-b50c149b7818 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.079398] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697801, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.101031] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 42d872d6-da12-474b-8741-1d991d507cfa/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1548.101031] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d69dc2f-2193-48e3-86c0-51036e51ebfc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.119400] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1548.119400] env[63297]: value = "task-1697802" [ 1548.119400] env[63297]: _type = "Task" [ 1548.119400] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.128156] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697802, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.313476] env[63297]: DEBUG nova.scheduler.client.report [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1548.404110] env[63297]: DEBUG nova.network.neutron [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1548.442274] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5253ecdf-43a2-97ab-7517-89497eead138, 'name': SearchDatastore_Task, 'duration_secs': 0.032384} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.442570] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.442828] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 57d93827-2a5a-4f12-a74b-147a1a934dd1/57d93827-2a5a-4f12-a74b-147a1a934dd1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1548.443112] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99ee5475-a8ef-41f7-a986-02f35309fd3c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.450786] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1548.450786] env[63297]: value = "task-1697803" [ 1548.450786] env[63297]: _type = "Task" [ 1548.450786] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.458593] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697803, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.581689] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697801, 'name': ReconfigVM_Task, 'duration_secs': 0.297608} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.582030] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Reconfigured VM instance instance-00000023 to attach disk [datastore1] fba9040d-f904-44a1-8785-14d4696ea939/fba9040d-f904-44a1-8785-14d4696ea939.vmdk or device None with type streamOptimized {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1548.582684] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f825cad7-1c83-43f9-8d41-00cac54b46f5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.591099] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1548.591099] env[63297]: value = "task-1697804" [ 1548.591099] env[63297]: _type = "Task" [ 1548.591099] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.599466] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697804, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.602496] env[63297]: DEBUG nova.network.neutron [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Updating instance_info_cache with network_info: [{"id": "287bdace-3df3-414a-8adb-2f8d720f8528", "address": "fa:16:3e:ec:df:59", "network": {"id": "437d4b1d-796c-43d8-8258-df0e6b4e36d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-940883115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731a719bb2a44a53985d10e02f9397cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap287bdace-3d", "ovs_interfaceid": "287bdace-3df3-414a-8adb-2f8d720f8528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.631253] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697802, 'name': ReconfigVM_Task, 'duration_secs': 0.305945} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.631781] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 42d872d6-da12-474b-8741-1d991d507cfa/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1548.632932] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17e86a2-4542-445e-abee-e32a93f21573 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.661254] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbbc2909-dbfb-4577-8de8-0d1db3b31b00 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.677631] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1548.677631] env[63297]: value = "task-1697805" [ 1548.677631] env[63297]: _type = "Task" [ 1548.677631] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.686612] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697805, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.819424] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.380s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.820133] env[63297]: DEBUG nova.compute.manager [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1548.827024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.038s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.827024] env[63297]: DEBUG nova.objects.instance [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Lazy-loading 'resources' on Instance uuid 5a868570-7504-4262-80b2-a458c219e689 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1548.856953] env[63297]: DEBUG nova.compute.manager [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Received event network-vif-plugged-287bdace-3df3-414a-8adb-2f8d720f8528 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1548.856953] env[63297]: DEBUG oslo_concurrency.lockutils [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] Acquiring lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.856953] env[63297]: DEBUG oslo_concurrency.lockutils [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.856953] env[63297]: DEBUG oslo_concurrency.lockutils [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.856953] env[63297]: DEBUG nova.compute.manager [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] No waiting events found dispatching network-vif-plugged-287bdace-3df3-414a-8adb-2f8d720f8528 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1548.856953] env[63297]: WARNING nova.compute.manager [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Received unexpected event network-vif-plugged-287bdace-3df3-414a-8adb-2f8d720f8528 for instance with vm_state building and task_state spawning. [ 1548.856953] env[63297]: DEBUG nova.compute.manager [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Received event network-changed-287bdace-3df3-414a-8adb-2f8d720f8528 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1548.856953] env[63297]: DEBUG nova.compute.manager [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Refreshing instance network info cache due to event network-changed-287bdace-3df3-414a-8adb-2f8d720f8528. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1548.856953] env[63297]: DEBUG oslo_concurrency.lockutils [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] Acquiring lock "refresh_cache-0b7b9f1b-d277-4219-92fb-e35a8b867e77" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.962985] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697803, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.098693] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697804, 'name': Rename_Task, 'duration_secs': 0.162737} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.099013] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1549.099274] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8dabea5-4f1a-4abc-9c79-f80341b03ccf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.104764] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Releasing lock "refresh_cache-0b7b9f1b-d277-4219-92fb-e35a8b867e77" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.106050] env[63297]: DEBUG nova.compute.manager [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Instance network_info: |[{"id": "287bdace-3df3-414a-8adb-2f8d720f8528", "address": "fa:16:3e:ec:df:59", "network": {"id": "437d4b1d-796c-43d8-8258-df0e6b4e36d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-940883115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731a719bb2a44a53985d10e02f9397cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap287bdace-3d", "ovs_interfaceid": "287bdace-3df3-414a-8adb-2f8d720f8528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1549.106754] env[63297]: DEBUG oslo_concurrency.lockutils [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] Acquired lock "refresh_cache-0b7b9f1b-d277-4219-92fb-e35a8b867e77" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.107035] env[63297]: DEBUG nova.network.neutron [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Refreshing network info cache for port 287bdace-3df3-414a-8adb-2f8d720f8528 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1549.108516] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 
0b7b9f1b-d277-4219-92fb-e35a8b867e77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:df:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '287bdace-3df3-414a-8adb-2f8d720f8528', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1549.116316] env[63297]: DEBUG oslo.service.loopingcall [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1549.116974] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1549.116974] env[63297]: value = "task-1697806" [ 1549.116974] env[63297]: _type = "Task" [ 1549.116974] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.117719] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1549.118513] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55e99c55-af1f-48b4-962c-498abd01cae0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.145250] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697806, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.146870] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1549.146870] env[63297]: value = "task-1697807" [ 1549.146870] env[63297]: _type = "Task" [ 1549.146870] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.158328] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697807, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.187758] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697805, 'name': ReconfigVM_Task, 'duration_secs': 0.188099} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.188221] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1549.188429] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f10e71db-9419-42f2-9a67-eb4e84a9f4cd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.196534] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1549.196534] env[63297]: value = "task-1697808" [ 1549.196534] env[63297]: _type = "Task" [ 1549.196534] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.206948] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697808, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.218031] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Acquiring lock "be532612-7192-4771-a3dc-25bd1dc6be6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.218031] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Lock "be532612-7192-4771-a3dc-25bd1dc6be6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.327114] env[63297]: DEBUG nova.compute.utils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1549.332479] env[63297]: DEBUG nova.compute.manager [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1549.332669] env[63297]: DEBUG nova.network.neutron [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1549.439587] env[63297]: DEBUG nova.policy [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0da2fdb3c81747698f971951c5e0068b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efc8039a70b34a269d3aed1ecb558b7e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1549.468054] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697803, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532282} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.468490] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 57d93827-2a5a-4f12-a74b-147a1a934dd1/57d93827-2a5a-4f12-a74b-147a1a934dd1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1549.468746] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1549.469069] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eaafadeb-5007-4377-a8b3-7a4314b7acdb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.480132] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1549.480132] env[63297]: value = "task-1697809" [ 1549.480132] env[63297]: _type = "Task" [ 1549.480132] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.490015] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697809, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.631083] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697806, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.656161] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697807, 'name': CreateVM_Task, 'duration_secs': 0.40358} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.659128] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1549.660163] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.660339] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.660659] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1549.660969] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c0603a1-24cb-4ef2-83eb-a4fe2a09baf4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.665727] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1549.665727] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e85199-1634-6a8c-d494-e0c768df1926" [ 1549.665727] env[63297]: _type = "Task" [ 1549.665727] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.671105] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d892f71a-0e64-41b2-834a-ab2e513a02b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.678055] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e85199-1634-6a8c-d494-e0c768df1926, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.682920] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eed5d3c-9c56-4301-a71f-2c78ba5f4a40 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.725597] env[63297]: DEBUG nova.compute.manager [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1549.732658] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1ba839-b942-454f-90a4-c4a4b2018bb3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.741349] env[63297]: DEBUG oslo_vmware.api [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697808, 'name': PowerOnVM_Task, 'duration_secs': 0.479332} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.743746] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1549.746598] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9884c13-af5d-4f5f-8b16-cef336c09650 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.752156] env[63297]: DEBUG nova.compute.manager [None req-ba2eab90-b02f-4faa-ac74-ddb6fee434cd tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1549.752960] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f178e81-d2d8-4a6b-8ee0-7bc8e77c8572 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.767135] env[63297]: DEBUG nova.compute.provider_tree [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1549.835024] env[63297]: DEBUG nova.compute.manager [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1549.879246] env[63297]: DEBUG nova.network.neutron [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Successfully created port: 82e9569c-63e6-41d2-ac37-b8d9b3d5378f {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1549.992897] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085587} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.993725] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1549.994640] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7fcbf9-ad08-41ad-a091-83f10f29f9db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.018388] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 57d93827-2a5a-4f12-a74b-147a1a934dd1/57d93827-2a5a-4f12-a74b-147a1a934dd1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1550.018735] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82137c6e-962f-4e5b-a368-6cfcc72a4c4d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.036984] env[63297]: DEBUG nova.network.neutron [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Updated VIF entry in instance network info cache for port 287bdace-3df3-414a-8adb-2f8d720f8528. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1550.037355] env[63297]: DEBUG nova.network.neutron [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Updating instance_info_cache with network_info: [{"id": "287bdace-3df3-414a-8adb-2f8d720f8528", "address": "fa:16:3e:ec:df:59", "network": {"id": "437d4b1d-796c-43d8-8258-df0e6b4e36d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-940883115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731a719bb2a44a53985d10e02f9397cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap287bdace-3d", "ovs_interfaceid": "287bdace-3df3-414a-8adb-2f8d720f8528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.044906] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1550.044906] env[63297]: value = "task-1697810" [ 1550.044906] env[63297]: _type = "Task" [ 1550.044906] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.054234] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697810, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.130039] env[63297]: DEBUG oslo_vmware.api [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697806, 'name': PowerOnVM_Task, 'duration_secs': 0.602962} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.130477] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1550.176613] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e85199-1634-6a8c-d494-e0c768df1926, 'name': SearchDatastore_Task, 'duration_secs': 0.011983} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.176947] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.177204] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1550.177501] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.177686] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.177881] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1550.178171] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-294e1334-c568-41e2-b3d6-ee3a5d5b2ca7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.189784] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1550.189887] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1550.190693] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2f1ae38-0af5-4aa7-b7db-d3c3ecbfcc65 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.198676] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1550.198676] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cf839d-7981-f215-4445-e1f6df9230fc" [ 1550.198676] env[63297]: _type = "Task" [ 1550.198676] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.207387] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cf839d-7981-f215-4445-e1f6df9230fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.251064] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.255424] env[63297]: DEBUG nova.compute.manager [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1550.256659] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6b042f-25af-487c-abf1-b148d4222416 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.275777] env[63297]: DEBUG nova.scheduler.client.report [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1550.541026] env[63297]: DEBUG oslo_concurrency.lockutils [req-dd44466c-cafb-44b6-bfb6-9cb906dcc2c7 
req-b6654a2b-59b3-4c37-a0d7-a647c2922c32 service nova] Releasing lock "refresh_cache-0b7b9f1b-d277-4219-92fb-e35a8b867e77" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.555219] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697810, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.708949] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cf839d-7981-f215-4445-e1f6df9230fc, 'name': SearchDatastore_Task, 'duration_secs': 0.009266} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.709789] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7d19caf-70d6-4b9d-a5d2-00da19cee130 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.715496] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1550.715496] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52de4084-0fe5-55f5-2c44-fdaa0ae0c4e6" [ 1550.715496] env[63297]: _type = "Task" [ 1550.715496] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.724257] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52de4084-0fe5-55f5-2c44-fdaa0ae0c4e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.777437] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ea127a33-161b-40c0-9a6a-f5e61916e4ed tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "fba9040d-f904-44a1-8785-14d4696ea939" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 47.785s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.783735] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.960s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.786471] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.573s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.786830] env[63297]: DEBUG nova.objects.instance [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lazy-loading 'resources' on Instance uuid 5cdb44c7-3dc1-4bce-8864-a1a40150e730 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1550.788839] env[63297]: INFO nova.compute.manager [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Unrescuing [ 1550.789092] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "refresh_cache-42d872d6-da12-474b-8741-1d991d507cfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.789271] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "refresh_cache-42d872d6-da12-474b-8741-1d991d507cfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.789407] env[63297]: DEBUG nova.network.neutron [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1550.807028] env[63297]: INFO nova.scheduler.client.report [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Deleted allocations for instance 5a868570-7504-4262-80b2-a458c219e689 [ 1550.843832] env[63297]: DEBUG nova.compute.manager 
[None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1550.874086] env[63297]: DEBUG nova.virt.hardware [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1550.874429] env[63297]: DEBUG nova.virt.hardware [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1550.874595] env[63297]: DEBUG nova.virt.hardware [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1550.874859] env[63297]: DEBUG nova.virt.hardware [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1550.874946] env[63297]: DEBUG nova.virt.hardware [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1550.876236] env[63297]: DEBUG nova.virt.hardware [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1550.876582] env[63297]: DEBUG nova.virt.hardware [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1550.876787] env[63297]: DEBUG nova.virt.hardware [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 
tempest-DeleteServersTestJSON-708693933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1550.876995] env[63297]: DEBUG nova.virt.hardware [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1550.877331] env[63297]: DEBUG nova.virt.hardware [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1550.877573] env[63297]: DEBUG nova.virt.hardware [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1550.879405] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ab23f8-dfee-4cd6-a802-20e79d10e095 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.888748] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda04cd8-e8d4-44cc-b2f6-afafb1cb1911 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.056406] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697810, 'name': ReconfigVM_Task, 'duration_secs': 0.69703} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.056701] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 57d93827-2a5a-4f12-a74b-147a1a934dd1/57d93827-2a5a-4f12-a74b-147a1a934dd1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1551.057332] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0276d60f-f3fd-4646-96ac-e0632718a37b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.063785] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1551.063785] env[63297]: value = "task-1697811" [ 1551.063785] env[63297]: _type = "Task" [ 1551.063785] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.076851] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697811, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.228481] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52de4084-0fe5-55f5-2c44-fdaa0ae0c4e6, 'name': SearchDatastore_Task, 'duration_secs': 0.010429} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.228789] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.229060] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 0b7b9f1b-d277-4219-92fb-e35a8b867e77/0b7b9f1b-d277-4219-92fb-e35a8b867e77.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1551.229324] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b91a346-5403-4f80-8bac-d8971683be67 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.236069] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1551.236069] env[63297]: value = "task-1697812" [ 1551.236069] env[63297]: _type = "Task" [ 1551.236069] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.244682] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697812, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.315527] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1065ae08-7b0b-4a40-9c99-d40ec3d4d40d tempest-ServerAddressesNegativeTestJSON-1873422599 tempest-ServerAddressesNegativeTestJSON-1873422599-project-member] Lock "5a868570-7504-4262-80b2-a458c219e689" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 13.579s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.576451] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697811, 'name': Rename_Task, 'duration_secs': 0.163469} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.581270] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1551.588506] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f94b6ec7-5f97-499b-8535-c7e3f741945a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.594468] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1551.594468] env[63297]: value = "task-1697813" [ 1551.594468] env[63297]: _type = "Task" [ 1551.594468] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.604229] env[63297]: DEBUG nova.compute.manager [req-3c78a28c-ba61-4e12-839b-d7a8d6bd6ce0 req-aacaeeca-2a40-4089-8fa3-25346bd4e89b service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Received event network-vif-plugged-82e9569c-63e6-41d2-ac37-b8d9b3d5378f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1551.604458] env[63297]: DEBUG oslo_concurrency.lockutils [req-3c78a28c-ba61-4e12-839b-d7a8d6bd6ce0 req-aacaeeca-2a40-4089-8fa3-25346bd4e89b service nova] Acquiring lock "020b06c5-44e2-4f74-a1dc-d7557db3537e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.604680] env[63297]: DEBUG oslo_concurrency.lockutils [req-3c78a28c-ba61-4e12-839b-d7a8d6bd6ce0 req-aacaeeca-2a40-4089-8fa3-25346bd4e89b service nova] Lock "020b06c5-44e2-4f74-a1dc-d7557db3537e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.604931] env[63297]: DEBUG oslo_concurrency.lockutils [req-3c78a28c-ba61-4e12-839b-d7a8d6bd6ce0 req-aacaeeca-2a40-4089-8fa3-25346bd4e89b service nova] Lock "020b06c5-44e2-4f74-a1dc-d7557db3537e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.605333] env[63297]: DEBUG nova.compute.manager [req-3c78a28c-ba61-4e12-839b-d7a8d6bd6ce0 req-aacaeeca-2a40-4089-8fa3-25346bd4e89b service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] No waiting events found dispatching network-vif-plugged-82e9569c-63e6-41d2-ac37-b8d9b3d5378f {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1551.605333] env[63297]: WARNING nova.compute.manager [req-3c78a28c-ba61-4e12-839b-d7a8d6bd6ce0 req-aacaeeca-2a40-4089-8fa3-25346bd4e89b service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Received unexpected event network-vif-plugged-82e9569c-63e6-41d2-ac37-b8d9b3d5378f for instance with vm_state building and task_state spawning. [ 1551.613795] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697813, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.646629] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe3fedd-6fd8-4970-903e-eb9e2b77895f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.656749] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244a648d-a7b4-4cae-861a-b94ab6474775 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.702023] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02383be-9400-4b75-b28f-bca2a4d4d706 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.711653] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550c73a4-61dd-448e-b2a0-b0e4ae9d4dd7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.735332] env[63297]: DEBUG nova.compute.provider_tree [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1551.749142] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697812, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.818779] env[63297]: DEBUG nova.network.neutron [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Updating instance_info_cache with network_info: [{"id": "6875a0ac-55bd-4388-9c3d-5105d4cf1c7a", "address": "fa:16:3e:10:ae:3e", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6875a0ac-55", "ovs_interfaceid": "6875a0ac-55bd-4388-9c3d-5105d4cf1c7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.878701] env[63297]: DEBUG nova.network.neutron [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Successfully updated port: 82e9569c-63e6-41d2-ac37-b8d9b3d5378f {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1552.107798] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697813, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.243240] env[63297]: DEBUG nova.scheduler.client.report [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1552.251676] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697812, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520403} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.252247] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 0b7b9f1b-d277-4219-92fb-e35a8b867e77/0b7b9f1b-d277-4219-92fb-e35a8b867e77.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1552.252480] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1552.252746] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2bfa8e12-7c96-4de4-aacf-b4134925cbc4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.261824] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1552.261824] env[63297]: value = "task-1697814" [ 1552.261824] env[63297]: _type = "Task" [ 1552.261824] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.274571] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697814, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.322418] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "refresh_cache-42d872d6-da12-474b-8741-1d991d507cfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.323150] env[63297]: DEBUG nova.objects.instance [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lazy-loading 'flavor' on Instance uuid 42d872d6-da12-474b-8741-1d991d507cfa {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1552.383639] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "refresh_cache-020b06c5-44e2-4f74-a1dc-d7557db3537e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.383639] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "refresh_cache-020b06c5-44e2-4f74-a1dc-d7557db3537e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.383639] env[63297]: DEBUG nova.network.neutron [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1552.520633] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cfc9c7-dec1-47f1-a1c9-e0608c26b086 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.534569] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7db89-b935-449b-a7e1-e959c83d39aa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Suspending the VM {{(pid=63297) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1552.534885] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e3cc957a-0621-45b9-9981-eec332007426 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.542757] env[63297]: DEBUG oslo_vmware.api [None req-e3b7db89-b935-449b-a7e1-e959c83d39aa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1552.542757] env[63297]: value = "task-1697815" [ 1552.542757] env[63297]: _type = "Task" [ 1552.542757] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.554761] env[63297]: DEBUG oslo_vmware.api [None req-e3b7db89-b935-449b-a7e1-e959c83d39aa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697815, 'name': SuspendVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.608922] env[63297]: DEBUG oslo_vmware.api [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697813, 'name': PowerOnVM_Task, 'duration_secs': 0.755812} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.609823] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1552.610216] env[63297]: DEBUG nova.compute.manager [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1552.611678] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2344f148-4d98-4644-909e-69985f3ecf88 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.756124] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.967s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.756124] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.581s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.756124] env[63297]: DEBUG nova.objects.instance [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lazy-loading 'resources' on Instance uuid b14e8466-68ab-4705-a439-6db961a149b0 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1552.775842] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697814, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081437} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.775842] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1552.775842] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5552eb9-8582-4211-84d8-efb9517875d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.803339] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 0b7b9f1b-d277-4219-92fb-e35a8b867e77/0b7b9f1b-d277-4219-92fb-e35a8b867e77.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1552.804601] env[63297]: INFO nova.scheduler.client.report [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted allocations for instance 5cdb44c7-3dc1-4bce-8864-a1a40150e730 [ 1552.809228] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33723c2b-d990-4b19-801c-bfc391c4400e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.829972] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b878c56b-c7c2-4336-95f1-398eb5b104f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.836350] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1552.836350] env[63297]: value = "task-1697816" [ 1552.836350] env[63297]: _type = "Task" [ 1552.836350] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.862838] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1552.863789] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6af47584-b269-409a-b125-cd66154a77fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.873034] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697816, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.873596] env[63297]: DEBUG oslo_vmware.api [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1552.873596] env[63297]: value = "task-1697817" [ 1552.873596] env[63297]: _type = "Task" [ 1552.873596] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.885764] env[63297]: DEBUG oslo_vmware.api [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697817, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.955950] env[63297]: DEBUG nova.network.neutron [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1553.057754] env[63297]: DEBUG oslo_vmware.api [None req-e3b7db89-b935-449b-a7e1-e959c83d39aa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697815, 'name': SuspendVM_Task} progress is 83%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.137038] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.315079] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a05f34d3-7415-4365-839a-2af9d3e18c9a tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "5cdb44c7-3dc1-4bce-8864-a1a40150e730" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.282s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.316874] env[63297]: DEBUG nova.network.neutron [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Updating instance_info_cache with network_info: [{"id": "82e9569c-63e6-41d2-ac37-b8d9b3d5378f", "address": "fa:16:3e:c5:63:c0", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82e9569c-63", "ovs_interfaceid": "82e9569c-63e6-41d2-ac37-b8d9b3d5378f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.363292] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697816, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.391808] env[63297]: DEBUG oslo_vmware.api [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697817, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.566206] env[63297]: DEBUG oslo_vmware.api [None req-e3b7db89-b935-449b-a7e1-e959c83d39aa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697815, 'name': SuspendVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.638941] env[63297]: DEBUG nova.compute.manager [req-ae0b0041-9b8a-4093-9a8b-e677d4bc956e req-9b85da95-cdc7-4c59-8889-f25995302992 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Received event network-changed-82e9569c-63e6-41d2-ac37-b8d9b3d5378f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1553.639261] env[63297]: DEBUG nova.compute.manager [req-ae0b0041-9b8a-4093-9a8b-e677d4bc956e req-9b85da95-cdc7-4c59-8889-f25995302992 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Refreshing instance network info cache due to event network-changed-82e9569c-63e6-41d2-ac37-b8d9b3d5378f. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1553.639472] env[63297]: DEBUG oslo_concurrency.lockutils [req-ae0b0041-9b8a-4093-9a8b-e677d4bc956e req-9b85da95-cdc7-4c59-8889-f25995302992 service nova] Acquiring lock "refresh_cache-020b06c5-44e2-4f74-a1dc-d7557db3537e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.644412] env[63297]: DEBUG oslo_vmware.rw_handles [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522ec85f-2d79-a2e2-eaa1-ab5a62d31056/disk-0.vmdk. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1553.645872] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2383ac26-3d65-42b5-9982-5a78072f127f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.650678] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88fbf4b-4b57-493a-aa78-02bbce0b627c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.660021] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87fd2e4c-ad1c-4315-b2fd-c5b614e3850c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.666024] env[63297]: DEBUG oslo_vmware.rw_handles [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522ec85f-2d79-a2e2-eaa1-ab5a62d31056/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1553.666206] env[63297]: ERROR oslo_vmware.rw_handles [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522ec85f-2d79-a2e2-eaa1-ab5a62d31056/disk-0.vmdk due to incomplete transfer. [ 1553.666449] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6cdd954d-b46b-492f-9df1-6398b73d34a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.699803] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee59693-21c7-474e-9eaf-37a2d3363dfa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.702897] env[63297]: DEBUG oslo_vmware.rw_handles [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522ec85f-2d79-a2e2-eaa1-ab5a62d31056/disk-0.vmdk. 
{{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1553.703142] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Uploaded image dc932a9e-d404-4f38-8fb5-26f5f3f4b879 to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1553.704786] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1553.705085] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ee0b20c2-cdac-40e3-bb79-4d120117044f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.717651] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce38e619-fd4e-4838-a12d-9dace6ec2fc9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.724055] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1553.724055] env[63297]: value = "task-1697818" [ 1553.724055] env[63297]: _type = "Task" [ 1553.724055] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.742515] env[63297]: DEBUG nova.compute.provider_tree [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1553.753781] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697818, 'name': Destroy_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.821156] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "refresh_cache-020b06c5-44e2-4f74-a1dc-d7557db3537e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.822809] env[63297]: DEBUG nova.compute.manager [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Instance network_info: |[{"id": "82e9569c-63e6-41d2-ac37-b8d9b3d5378f", "address": "fa:16:3e:c5:63:c0", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82e9569c-63", "ovs_interfaceid": "82e9569c-63e6-41d2-ac37-b8d9b3d5378f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1553.822809] env[63297]: DEBUG oslo_concurrency.lockutils [req-ae0b0041-9b8a-4093-9a8b-e677d4bc956e req-9b85da95-cdc7-4c59-8889-f25995302992 service nova] Acquired lock "refresh_cache-020b06c5-44e2-4f74-a1dc-d7557db3537e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.822809] env[63297]: DEBUG nova.network.neutron [req-ae0b0041-9b8a-4093-9a8b-e677d4bc956e req-9b85da95-cdc7-4c59-8889-f25995302992 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Refreshing network info cache for port 82e9569c-63e6-41d2-ac37-b8d9b3d5378f {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1553.823980] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:63:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '22390021-1742-415d-b442-811550d09927', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '82e9569c-63e6-41d2-ac37-b8d9b3d5378f', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1553.832591] env[63297]: DEBUG oslo.service.loopingcall [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 
tempest-DeleteServersTestJSON-708693933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1553.833748] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1553.833748] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8479b119-454c-4c48-9232-8cb09c77d9b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.865210] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697816, 'name': ReconfigVM_Task, 'duration_secs': 0.683825} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.866890] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 0b7b9f1b-d277-4219-92fb-e35a8b867e77/0b7b9f1b-d277-4219-92fb-e35a8b867e77.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1553.867670] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1553.867670] env[63297]: value = "task-1697819" [ 1553.867670] env[63297]: _type = "Task" [ 1553.867670] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.867920] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52e010d2-ffba-4d4d-9ff4-16ce3029455c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.888660] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697819, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.889113] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1553.889113] env[63297]: value = "task-1697820" [ 1553.889113] env[63297]: _type = "Task" [ 1553.889113] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.910675] env[63297]: DEBUG oslo_vmware.api [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697817, 'name': PowerOffVM_Task, 'duration_secs': 0.618178} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.915900] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1553.924437] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Reconfiguring VM instance instance-0000004c to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1553.925028] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697820, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.925434] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-275101c6-c9df-4d90-acf6-f62e3e0a0df8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.955579] env[63297]: DEBUG oslo_vmware.api [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1553.955579] env[63297]: value = "task-1697821" [ 1553.955579] env[63297]: _type = "Task" [ 1553.955579] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.971713] env[63297]: DEBUG oslo_vmware.api [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697821, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.057310] env[63297]: DEBUG oslo_vmware.api [None req-e3b7db89-b935-449b-a7e1-e959c83d39aa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697815, 'name': SuspendVM_Task, 'duration_secs': 1.040447} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.057310] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7db89-b935-449b-a7e1-e959c83d39aa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Suspended the VM {{(pid=63297) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1554.057310] env[63297]: DEBUG nova.compute.manager [None req-e3b7db89-b935-449b-a7e1-e959c83d39aa tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1554.058535] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0930d5ed-9d7a-4eea-ae8b-8511ec270b5c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.221315] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "57d93827-2a5a-4f12-a74b-147a1a934dd1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.221315] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "57d93827-2a5a-4f12-a74b-147a1a934dd1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.221315] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "57d93827-2a5a-4f12-a74b-147a1a934dd1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.221315] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "57d93827-2a5a-4f12-a74b-147a1a934dd1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.221315] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "57d93827-2a5a-4f12-a74b-147a1a934dd1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.224329] env[63297]: INFO nova.compute.manager [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 
tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Terminating instance [ 1554.229900] env[63297]: DEBUG nova.compute.manager [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1554.230315] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1554.231253] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a7fd70-1182-4bbd-af30-8d4ebc115318 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.240955] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697818, 'name': Destroy_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.243742] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1554.244450] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d600305-793e-44bf-80d4-d00f67b496ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.254782] env[63297]: DEBUG oslo_vmware.api [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1554.254782] env[63297]: value = "task-1697822" [ 1554.254782] env[63297]: _type = "Task" [ 1554.254782] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.266818] env[63297]: DEBUG oslo_vmware.api [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697822, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.268567] env[63297]: ERROR nova.scheduler.client.report [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [req-574a2e10-a35a-44c8-a9db-7e09cd9449be] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-574a2e10-a35a-44c8-a9db-7e09cd9449be"}]} [ 1554.288220] env[63297]: DEBUG nova.scheduler.client.report [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1554.301220] env[63297]: DEBUG nova.scheduler.client.report [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1554.301337] env[63297]: DEBUG nova.compute.provider_tree [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1554.315943] env[63297]: DEBUG nova.scheduler.client.report [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1554.336866] env[63297]: DEBUG nova.scheduler.client.report [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 
tempest-ServersAdminTestJSON-1877317013-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1554.385216] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697819, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.402901] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697820, 'name': Rename_Task, 'duration_secs': 0.324207} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.403340] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1554.403661] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-112b9a77-1ec7-41da-a5b7-b18ac9d989e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.416722] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1554.416722] env[63297]: value = "task-1697823" [ 1554.416722] env[63297]: _type = "Task" [ 1554.416722] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.430589] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697823, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.469197] env[63297]: DEBUG oslo_vmware.api [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697821, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.678157] env[63297]: DEBUG nova.network.neutron [req-ae0b0041-9b8a-4093-9a8b-e677d4bc956e req-9b85da95-cdc7-4c59-8889-f25995302992 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Updated VIF entry in instance network info cache for port 82e9569c-63e6-41d2-ac37-b8d9b3d5378f. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1554.678522] env[63297]: DEBUG nova.network.neutron [req-ae0b0041-9b8a-4093-9a8b-e677d4bc956e req-9b85da95-cdc7-4c59-8889-f25995302992 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Updating instance_info_cache with network_info: [{"id": "82e9569c-63e6-41d2-ac37-b8d9b3d5378f", "address": "fa:16:3e:c5:63:c0", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82e9569c-63", "ovs_interfaceid": "82e9569c-63e6-41d2-ac37-b8d9b3d5378f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.691761] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7bee8a-0259-4890-9617-daa6fc3be65b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.705547] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f26a5f-abbe-4d12-a3fa-8d4c85aa94b3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.719715] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "f5866b1e-cd77-464e-858e-eb14dab0637c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.720060] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "f5866b1e-cd77-464e-858e-eb14dab0637c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.764419] env[63297]: DEBUG nova.compute.manager [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1554.771896] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f31ac5b-edb6-4d82-983b-274a7c96106e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.788470] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba37247-1d15-4407-89e7-6423ebd47ab6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.791717] env[63297]: DEBUG oslo_vmware.api [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697822, 'name': PowerOffVM_Task, 'duration_secs': 0.418059} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.791970] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697818, 'name': Destroy_Task, 'duration_secs': 0.918805} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.792506] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1554.792684] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1554.794136] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Destroyed the VM [ 1554.794136] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1554.794136] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3571fa3c-4fec-45ee-8e8a-43ced787de94 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.795608] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6e0205d6-3756-4e1e-8cf6-a54c1533562b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.805146] env[63297]: DEBUG nova.compute.provider_tree [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Updating inventory in ProviderTree for provider 
88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1554.813264] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1554.813264] env[63297]: value = "task-1697824" [ 1554.813264] env[63297]: _type = "Task" [ 1554.813264] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.823268] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697824, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.883812] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697819, 'name': CreateVM_Task, 'duration_secs': 0.85067} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.884102] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1554.885041] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.885312] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.886025] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1554.886573] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10ebd911-c946-4e00-a137-2b77c1731745 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.889738] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Unregistered the VM {{(pid=63297) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1554.890025] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1554.890254] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleting the datastore file [datastore1] 57d93827-2a5a-4f12-a74b-147a1a934dd1 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1554.891071] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-083e119a-b879-4ecd-8d53-b78f253cba97 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.896122] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1554.896122] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fbe885-d9db-c4c9-866b-5dc4cb47f04a" [ 1554.896122] env[63297]: _type = "Task" [ 1554.896122] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.905125] env[63297]: DEBUG oslo_vmware.api [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1554.905125] env[63297]: value = "task-1697826" [ 1554.905125] env[63297]: _type = "Task" [ 1554.905125] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.913261] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fbe885-d9db-c4c9-866b-5dc4cb47f04a, 'name': SearchDatastore_Task, 'duration_secs': 0.011884} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.914052] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.914362] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1554.914964] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.914964] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.914964] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1554.915261] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dafaf3ae-3e68-4593-ad77-4b427c7e69d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.920259] env[63297]: DEBUG oslo_vmware.api [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697826, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.928093] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "a10df0e9-4278-48f1-b111-864ac793f630" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.928331] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a10df0e9-4278-48f1-b111-864ac793f630" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.936432] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697823, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.937381] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1554.939321] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1554.939321] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70f429f1-5a1a-4d08-916f-091671785d30 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.949478] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1554.949478] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52343d95-a307-bc16-78c5-a5461592c776" [ 1554.949478] env[63297]: _type = "Task" [ 1554.949478] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.956594] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52343d95-a307-bc16-78c5-a5461592c776, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.966654] env[63297]: DEBUG oslo_vmware.api [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697821, 'name': ReconfigVM_Task, 'duration_secs': 0.697329} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.967328] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Reconfigured VM instance instance-0000004c to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1554.969991] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1554.969991] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7701d435-701b-4d2c-9b8f-5c89a3bcbd69 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.977386] env[63297]: DEBUG oslo_vmware.api [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1554.977386] env[63297]: value = "task-1697827" [ 1554.977386] env[63297]: _type = "Task" [ 1554.977386] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.988968] env[63297]: DEBUG oslo_vmware.api [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697827, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.118806] env[63297]: INFO nova.compute.manager [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Resuming [ 1555.119504] env[63297]: DEBUG nova.objects.instance [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lazy-loading 'flavor' on Instance uuid fba9040d-f904-44a1-8785-14d4696ea939 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1555.181327] env[63297]: DEBUG oslo_concurrency.lockutils [req-ae0b0041-9b8a-4093-9a8b-e677d4bc956e req-9b85da95-cdc7-4c59-8889-f25995302992 service nova] Releasing lock "refresh_cache-020b06c5-44e2-4f74-a1dc-d7557db3537e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.295732] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.325980] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697824, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.333330] env[63297]: ERROR nova.scheduler.client.report [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] [req-6a7c3078-371d-431d-9b90-19b3ed4117da] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6a7c3078-371d-431d-9b90-19b3ed4117da"}]} [ 1555.354496] env[63297]: DEBUG nova.scheduler.client.report [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1555.367638] env[63297]: DEBUG nova.scheduler.client.report [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1555.367901] env[63297]: DEBUG nova.compute.provider_tree [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1555.382882] env[63297]: DEBUG nova.scheduler.client.report [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1555.383142] env[63297]: DEBUG nova.compute.provider_tree [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 109 to 110 during operation: update_aggregates {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1555.401282] env[63297]: DEBUG nova.scheduler.client.report [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1555.416027] env[63297]: DEBUG 
oslo_vmware.api [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697826, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268427} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.417157] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1555.417157] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1555.417157] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1555.417157] env[63297]: INFO nova.compute.manager [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1555.417157] env[63297]: DEBUG oslo.service.loopingcall [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1555.417476] env[63297]: DEBUG nova.compute.manager [-] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1555.417476] env[63297]: DEBUG nova.network.neutron [-] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1555.432470] env[63297]: DEBUG nova.compute.manager [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1555.435393] env[63297]: DEBUG oslo_vmware.api [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697823, 'name': PowerOnVM_Task, 'duration_secs': 0.655952} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.436251] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1555.436317] env[63297]: INFO nova.compute.manager [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Took 9.03 seconds to spawn the instance on the hypervisor. [ 1555.436504] env[63297]: DEBUG nova.compute.manager [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1555.437345] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f764bc5e-5ce7-4c17-92be-41c588204c0b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.465500] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52343d95-a307-bc16-78c5-a5461592c776, 'name': SearchDatastore_Task, 'duration_secs': 0.01484} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.466405] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4490f4f-7cde-4a2d-b265-4416c38769c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.477022] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1555.477022] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52661c1b-a84c-4704-1813-50cd47ff358a" [ 1555.477022] env[63297]: _type = "Task" [ 1555.477022] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.486709] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52661c1b-a84c-4704-1813-50cd47ff358a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.494752] env[63297]: DEBUG oslo_vmware.api [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697827, 'name': PowerOnVM_Task, 'duration_secs': 0.483327} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.495022] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1555.495258] env[63297]: DEBUG nova.compute.manager [None req-1d809c26-cdc4-4c9c-a9d0-44aa9f5498c2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1555.496225] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27bb4bd6-212f-46e3-959d-eb732766b9e4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.699322] env[63297]: DEBUG nova.compute.manager [req-b79d01a7-884c-4166-b9a4-48593abc50e6 req-0d9cd64e-c66b-4e2e-897d-d5f892e23d30 service nova] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Received event network-vif-deleted-fb3d9f25-b216-49b6-90c4-a53298983b44 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1555.699322] env[63297]: INFO nova.compute.manager [req-b79d01a7-884c-4166-b9a4-48593abc50e6 req-0d9cd64e-c66b-4e2e-897d-d5f892e23d30 service nova] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Neutron deleted interface fb3d9f25-b216-49b6-90c4-a53298983b44; detaching it from the instance and deleting it from the info cache [ 1555.699634] env[63297]: DEBUG nova.network.neutron [req-b79d01a7-884c-4166-b9a4-48593abc50e6 req-0d9cd64e-c66b-4e2e-897d-d5f892e23d30 service nova] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.775505] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5c2f9b-aa16-4bcc-a043-1b692515b65b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.784483] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e95c0fc-4e11-4b04-8253-e38c8acb55ed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.821741] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acdc17bc-5b9e-4f23-ba41-c00ab5d1a071 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.831824] env[63297]: DEBUG oslo_vmware.api [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697824, 'name': RemoveSnapshot_Task, 'duration_secs': 0.659765} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.833041] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4feab9f7-076a-43c1-9c41-8cda1de0bc61 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.836795] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1555.837082] env[63297]: INFO nova.compute.manager [None req-debbeabe-fd19-46c8-93ab-37ac52ee9ad0 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Took 19.43 seconds to snapshot the instance on the hypervisor. [ 1555.844764] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "310cf8d4-613a-4c35-b118-7d79138e4799" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.845022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "310cf8d4-613a-4c35-b118-7d79138e4799" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.858219] env[63297]: DEBUG nova.compute.provider_tree [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1555.956838] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.968932] env[63297]: INFO nova.compute.manager [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Took 27.05 seconds to build instance. 
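The "Task: {'id': task-…} progress is N%." and "completed successfully" lines throughout this stretch (RemoveSnapshot_Task, PowerOnVM_Task, CopyVirtualDisk_Task) all come from the same wait loop: the driver submits a vCenter task, then polls its task info until it reaches a terminal state, logging progress on each poll. A minimal sketch of that pattern follows; `get_task_info` is a hypothetical stand-in for the property-collector read the real session performs, and the print calls stand in for the DEBUG log lines above.

```python
import time

def get_task_info(task_ref):
    """Hypothetical helper: return (state, progress) for a vCenter task.

    In the real driver this is a PropertyCollector read of the task's
    TaskInfo object; it is stubbed here purely for illustration.
    """
    raise NotImplementedError

def wait_for_task(task_ref, poll_interval=0.5):
    """Poll a vCenter task until it succeeds or fails.

    Reproduces the rhythm of the log: one 'progress is N%' line per poll,
    then a 'completed successfully' line (or an error) at the end.
    """
    while True:
        state, progress = get_task_info(task_ref)
        if state in ("queued", "running"):
            print(f"Task {task_ref} progress is {progress}%.")
        elif state == "success":
            print(f"Task {task_ref} completed successfully.")
            return
        else:  # 'error'
            raise RuntimeError(f"Task {task_ref} failed")
        time.sleep(poll_interval)
```

The durations recorded in the log ('duration_secs': 0.659765 for the snapshot delete, 0.655952 for the power-on) are simply the elapsed time between task submission and the first poll that observes the success state.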
[ 1555.985843] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52661c1b-a84c-4704-1813-50cd47ff358a, 'name': SearchDatastore_Task, 'duration_secs': 0.022984} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.986061] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.986282] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 020b06c5-44e2-4f74-a1dc-d7557db3537e/020b06c5-44e2-4f74-a1dc-d7557db3537e.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1555.986778] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fef04195-7fee-459b-800f-815aea376e71 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.995934] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1555.995934] env[63297]: value = "task-1697828" [ 1555.995934] env[63297]: _type = "Task" [ 1555.995934] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.006814] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697828, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.129365] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1556.129591] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquired lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.129729] env[63297]: DEBUG nova.network.neutron [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1556.179413] env[63297]: DEBUG nova.network.neutron [-] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.202318] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b3a4b487-d466-424d-9638-e888bfb28528 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.213603] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fa140f-49a9-4a99-aeae-5e98b12e3677 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.259612] env[63297]: DEBUG nova.compute.manager [req-b79d01a7-884c-4166-b9a4-48593abc50e6 req-0d9cd64e-c66b-4e2e-897d-d5f892e23d30 service nova] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Detach interface failed, port_id=fb3d9f25-b216-49b6-90c4-a53298983b44, reason: Instance 57d93827-2a5a-4f12-a74b-147a1a934dd1 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1556.347382] env[63297]: DEBUG nova.compute.manager [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1556.412435] env[63297]: DEBUG nova.scheduler.client.report [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 110 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1556.412435] env[63297]: DEBUG nova.compute.provider_tree [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 110 to 111 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1556.412435] env[63297]: DEBUG nova.compute.provider_tree [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1556.471223] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec3fd698-c7e3-4c9a-89f6-1da22a035900 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.559s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.512139] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697828, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.682734] env[63297]: INFO nova.compute.manager [-] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Took 1.27 seconds to deallocate network for instance. 
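The DeleteServersTestJSON request above walks through the spawn-from-image-cache sequence: acquire a lock named after the cached VMDK under [datastore1] devstack-image-cache_base, run SearchDatastore_Task to confirm the cached disk exists, release the lock, then CopyVirtualDisk_Task into the instance directory and ExtendVirtualDisk_Task to the flavor's root size. Below is a rough sketch of that ordering under stated assumptions: `vmdk_exists`, `copy_vmdk`, and `extend_virtual_disk` are hypothetical helpers, and only the oslo.concurrency lock context manager is a real API (its use here is still illustrative, not the driver's actual code).

```python
from oslo_concurrency import lockutils

# Hypothetical stand-ins for SearchDatastore_Task, CopyVirtualDisk_Task and
# ExtendVirtualDisk_Task; each would go through the vCenter API in practice.
def vmdk_exists(path): ...
def copy_vmdk(src, dst): ...
def extend_virtual_disk(path, size_kb): ...

CACHE = "[datastore1] devstack-image-cache_base"

def build_root_disk(image_id, instance_uuid, root_size_kb):
    cached = f"{CACHE}/{image_id}/{image_id}.vmdk"
    dest = f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk"

    # Serialize on the cached image so concurrent spawns of the same image do
    # not race on the cache entry (the Acquiring/Acquired/Releasing lock lines
    # for devstack-image-cache_base/41f1ad71-... in the log above).
    with lockutils.lock(cached):
        if not vmdk_exists(cached):
            raise RuntimeError("image not cached; the real flow would fetch it here")

    # The copy happens after the cache lock is released, then the copy is grown
    # to the requested root size (CopyVirtualDisk_Task, then ExtendVirtualDisk_Task).
    copy_vmdk(cached, dest)
    extend_virtual_disk(dest, root_size_kb)
    return dest
```

Copying rather than linking keeps the cache entry immutable; the per-instance copy is what ReconfigVM_Task later attaches as the root disk (the "Reconfiguring VM instance ... to attach disk" entry further down).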
[ 1556.876274] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.893331] env[63297]: DEBUG nova.network.neutron [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updating instance_info_cache with network_info: [{"id": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "address": "fa:16:3e:13:0a:af", "network": {"id": "77c68484-41bb-4cce-bb80-50e08cc5af11", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-795079396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf40bf7f33349cb8bb098887d1244ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd69c1e-7e", "ovs_interfaceid": "1dd69c1e-7eee-4b1b-b4a7-421ab5477495", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.916147] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.160s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.920135] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.515s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.920455] env[63297]: DEBUG nova.objects.instance [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lazy-loading 'resources' on Instance uuid 89c9cd40-585e-4ae6-88b3-1a33a94c3b52 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1556.945691] env[63297]: INFO nova.scheduler.client.report [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Deleted allocations for instance b14e8466-68ab-4705-a439-6db961a149b0 [ 1557.009986] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 
tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697828, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.190174] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.396749] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Releasing lock "refresh_cache-fba9040d-f904-44a1-8785-14d4696ea939" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.397712] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19811998-1586-4b33-b973-488acecf9c58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.405789] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Resuming the VM {{(pid=63297) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1557.406107] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4f731fe-34f7-44f5-8a12-7861c8db276e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.416199] env[63297]: DEBUG oslo_vmware.api [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1557.416199] env[63297]: value = "task-1697829" [ 1557.416199] env[63297]: _type = "Task" [ 1557.416199] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.426216] env[63297]: DEBUG oslo_vmware.api [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697829, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.456770] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b20b5d8e-77c0-4ddd-a5a9-65117e9a39e8 tempest-ServersAdminTestJSON-1877317013 tempest-ServersAdminTestJSON-1877317013-project-member] Lock "b14e8466-68ab-4705-a439-6db961a149b0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.332s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.511544] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697828, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.02605} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.514746] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 020b06c5-44e2-4f74-a1dc-d7557db3537e/020b06c5-44e2-4f74-a1dc-d7557db3537e.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1557.515380] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1557.516434] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c994315e-3873-44ce-b9d4-b9a4948db34d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.532116] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1557.532116] env[63297]: value = "task-1697830" [ 1557.532116] env[63297]: _type = "Task" [ 1557.532116] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.543225] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697830, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.781927] env[63297]: DEBUG nova.compute.manager [req-d0be1533-5b4c-4176-a90b-a99cf1f4bed7 req-a982ad6c-a8e8-4264-92a4-54f58eedf887 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Received event network-changed-287bdace-3df3-414a-8adb-2f8d720f8528 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1557.783012] env[63297]: DEBUG nova.compute.manager [req-d0be1533-5b4c-4176-a90b-a99cf1f4bed7 req-a982ad6c-a8e8-4264-92a4-54f58eedf887 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Refreshing instance network info cache due to event network-changed-287bdace-3df3-414a-8adb-2f8d720f8528. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1557.783012] env[63297]: DEBUG oslo_concurrency.lockutils [req-d0be1533-5b4c-4176-a90b-a99cf1f4bed7 req-a982ad6c-a8e8-4264-92a4-54f58eedf887 service nova] Acquiring lock "refresh_cache-0b7b9f1b-d277-4219-92fb-e35a8b867e77" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.783012] env[63297]: DEBUG oslo_concurrency.lockutils [req-d0be1533-5b4c-4176-a90b-a99cf1f4bed7 req-a982ad6c-a8e8-4264-92a4-54f58eedf887 service nova] Acquired lock "refresh_cache-0b7b9f1b-d277-4219-92fb-e35a8b867e77" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.783012] env[63297]: DEBUG nova.network.neutron [req-d0be1533-5b4c-4176-a90b-a99cf1f4bed7 req-a982ad6c-a8e8-4264-92a4-54f58eedf887 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Refreshing network info cache for port 287bdace-3df3-414a-8adb-2f8d720f8528 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1557.823545] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034cc971-c6e9-4802-a0f1-9d9019a20208 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.834473] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4131188e-11f9-4c23-ba8f-bdb10f32b6c3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.870612] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8b5596-34d2-4aea-9906-ab28463a07c5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.880424] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba5dffe-724e-4598-b8f5-6f17646c122c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.901952] env[63297]: DEBUG nova.compute.provider_tree [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1557.936236] env[63297]: DEBUG oslo_vmware.api [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697829, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.044483] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697830, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082186} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.045476] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1558.045956] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff0c31b-b8f1-4dd0-849b-4c5cd5472db3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.072361] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 020b06c5-44e2-4f74-a1dc-d7557db3537e/020b06c5-44e2-4f74-a1dc-d7557db3537e.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1558.073411] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3fca99e-2b18-4943-952c-b41b1544457d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.096529] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1558.096529] env[63297]: value = "task-1697831" [ 1558.096529] env[63297]: _type = "Task" [ 1558.096529] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.105636] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697831, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.431697] env[63297]: DEBUG oslo_vmware.api [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697829, 'name': PowerOnVM_Task, 'duration_secs': 0.584273} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.432764] env[63297]: ERROR nova.scheduler.client.report [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] [req-0dc19f4f-0a58-4fe4-a6d0-b071bcd7bd8e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0dc19f4f-0a58-4fe4-a6d0-b071bcd7bd8e"}]} [ 1558.433118] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Resumed the VM {{(pid=63297) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1558.433311] env[63297]: DEBUG nova.compute.manager [None req-4e706488-ce31-4477-8f31-7cf39c5072a1 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1558.440295] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7dd92a5-a1d2-47ec-9403-579c065b749d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.460887] env[63297]: DEBUG nova.scheduler.client.report [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1558.485271] env[63297]: DEBUG nova.scheduler.client.report [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1558.485760] env[63297]: DEBUG nova.compute.provider_tree [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1558.499987] env[63297]: DEBUG nova.scheduler.client.report [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1558.525708] env[63297]: DEBUG nova.scheduler.client.report [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1558.529205] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Acquiring lock "a0f4160e-cfb4-4d1d-bbee-6df44eb363fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.529205] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Lock "a0f4160e-cfb4-4d1d-bbee-6df44eb363fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.609634] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697831, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.898256] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1be1f43-bd1b-4b23-8ca4-b56c1ab51d67 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.907390] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905fb989-629c-415d-b238-36a5e86f2e08 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.945220] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc96b469-f656-4a94-a354-7203c77c9176 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.955791] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06181ea6-8377-4bd6-9d1a-2071b3c5e118 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.975753] env[63297]: DEBUG nova.compute.provider_tree [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1559.021330] env[63297]: DEBUG nova.network.neutron [req-d0be1533-5b4c-4176-a90b-a99cf1f4bed7 req-a982ad6c-a8e8-4264-92a4-54f58eedf887 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Updated VIF entry in instance network info cache for port 287bdace-3df3-414a-8adb-2f8d720f8528. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1559.021852] env[63297]: DEBUG nova.network.neutron [req-d0be1533-5b4c-4176-a90b-a99cf1f4bed7 req-a982ad6c-a8e8-4264-92a4-54f58eedf887 service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Updating instance_info_cache with network_info: [{"id": "287bdace-3df3-414a-8adb-2f8d720f8528", "address": "fa:16:3e:ec:df:59", "network": {"id": "437d4b1d-796c-43d8-8258-df0e6b4e36d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-940883115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731a719bb2a44a53985d10e02f9397cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap287bdace-3d", "ovs_interfaceid": "287bdace-3df3-414a-8adb-2f8d720f8528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.034849] env[63297]: DEBUG nova.compute.manager [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1559.109769] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697831, 'name': ReconfigVM_Task, 'duration_secs': 0.984957} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.109769] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 020b06c5-44e2-4f74-a1dc-d7557db3537e/020b06c5-44e2-4f74-a1dc-d7557db3537e.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1559.110276] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0c822c6-ab3f-4920-99d3-7c477f8edeae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.117492] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1559.117492] env[63297]: value = "task-1697832" [ 1559.117492] env[63297]: _type = "Task" [ 1559.117492] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.127691] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697832, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.522942] env[63297]: DEBUG nova.scheduler.client.report [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 112 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1559.523428] env[63297]: DEBUG nova.compute.provider_tree [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 112 to 113 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1559.524502] env[63297]: DEBUG nova.compute.provider_tree [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1559.532816] env[63297]: DEBUG oslo_concurrency.lockutils [req-d0be1533-5b4c-4176-a90b-a99cf1f4bed7 req-a982ad6c-a8e8-4264-92a4-54f58eedf887 service nova] Releasing lock "refresh_cache-0b7b9f1b-d277-4219-92fb-e35a8b867e77" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.568872] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.633391] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697832, 'name': Rename_Task, 'duration_secs': 0.436743} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.633747] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1559.634160] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-642a6d8b-85a8-4176-b0f9-0193947b3ece {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.643739] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1559.643739] env[63297]: value = "task-1697833" [ 1559.643739] env[63297]: _type = "Task" [ 1559.643739] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.653096] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697833, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.033402] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.115s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.036612] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.785s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.037583] env[63297]: INFO nova.compute.claims [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1560.059241] env[63297]: INFO nova.scheduler.client.report [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Deleted allocations for instance 89c9cd40-585e-4ae6-88b3-1a33a94c3b52 [ 1560.164429] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697833, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.207821] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.207982] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.572909] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d7c4f855-5a50-4c5a-bfe6-c6671cd89033 tempest-ServerRescueTestJSON-1283339724 tempest-ServerRescueTestJSON-1283339724-project-member] Lock "89c9cd40-585e-4ae6-88b3-1a33a94c3b52" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.791s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.660025] env[63297]: DEBUG oslo_vmware.api [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697833, 'name': PowerOnVM_Task, 'duration_secs': 0.779779} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.661159] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1560.661159] env[63297]: INFO nova.compute.manager [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Took 9.82 seconds to spawn the instance on the hypervisor. [ 1560.661159] env[63297]: DEBUG nova.compute.manager [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1560.664719] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d793fbcc-6f12-40f4-989a-e67487052d11 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.715134] env[63297]: DEBUG nova.compute.manager [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1561.192606] env[63297]: INFO nova.compute.manager [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Took 27.07 seconds to build instance. [ 1561.247783] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.447970] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f9cc75-6eac-4fa8-ab70-510b7aa0d9c9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.458935] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be7edb6-fe64-4ca0-acf1-e0ce81a1582c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.497083] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebbaf58-cda9-4adb-affc-2b99abdc10fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.506831] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e610ec-a22c-4249-b5be-b9bc2a295162 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.526752] env[63297]: DEBUG nova.compute.provider_tree [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.696573] env[63297]: DEBUG oslo_concurrency.lockutils [None req-18df464e-506c-436a-9a06-8e2898c63453 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "020b06c5-44e2-4f74-a1dc-d7557db3537e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.592s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.097589] env[63297]: DEBUG nova.scheduler.client.report [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1562.327099] env[63297]: DEBUG 
oslo_concurrency.lockutils [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "020b06c5-44e2-4f74-a1dc-d7557db3537e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.327418] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "020b06c5-44e2-4f74-a1dc-d7557db3537e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.327544] env[63297]: INFO nova.compute.manager [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Shelving [ 1562.536329] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.536844] env[63297]: DEBUG nova.compute.manager [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1562.539492] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.405s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.539675] env[63297]: DEBUG nova.objects.instance [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63297) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1562.835825] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1562.836217] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-049456b5-6a63-4d7a-87e1-dcdbde467257 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.845189] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1562.845189] env[63297]: value = "task-1697834" [ 1562.845189] env[63297]: _type = "Task" [ 1562.845189] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.863815] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697834, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.044456] env[63297]: DEBUG nova.compute.utils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1563.052806] env[63297]: DEBUG nova.compute.manager [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1563.052806] env[63297]: DEBUG nova.network.neutron [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1563.110417] env[63297]: DEBUG nova.policy [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34d82924bd1f4b29a654eea53c74302d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c19fb23a8e7c4651bca64357acf03ea7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1563.360647] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697834, 'name': PowerOffVM_Task, 'duration_secs': 0.23355} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.360647] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1563.360647] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e78f4e8-8ef8-4fd1-a164-3f4716e90c4c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.382031] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6673ddc0-075f-4e7e-ab16-2e8988597a23 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.550769] env[63297]: DEBUG nova.compute.manager [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1563.553969] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8a5b0202-6d5a-49bb-b5a6-4f196bf267d3 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.555312] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.260s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.557181] env[63297]: INFO nova.compute.claims [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1563.610713] env[63297]: DEBUG nova.network.neutron [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Successfully created port: a6bfaa78-84a9-413e-b35f-f3d7485fed94 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1563.895715] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1563.896102] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bfcdf6a4-f43c-454f-b3fb-f7fe2a2a27f5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.904876] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1563.904876] env[63297]: value = "task-1697835" [ 1563.904876] env[63297]: _type = "Task" [ 1563.904876] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.916327] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697835, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.418444] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697835, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.566635] env[63297]: DEBUG nova.compute.manager [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1564.598931] env[63297]: DEBUG nova.virt.hardware [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1564.599298] env[63297]: DEBUG nova.virt.hardware [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1564.599668] env[63297]: DEBUG nova.virt.hardware [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1564.599890] env[63297]: DEBUG nova.virt.hardware [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1564.599999] env[63297]: DEBUG nova.virt.hardware [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1564.600169] env[63297]: DEBUG nova.virt.hardware [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1564.600434] env[63297]: DEBUG nova.virt.hardware [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1564.600825] env[63297]: DEBUG nova.virt.hardware [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1564.600998] env[63297]: DEBUG nova.virt.hardware [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1564.601184] env[63297]: DEBUG nova.virt.hardware [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1564.601365] env[63297]: DEBUG nova.virt.hardware [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1564.604646] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2678a1a-1085-4b8b-8939-9f6061d03af5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.615513] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205b30de-0fe9-44c8-b32e-25a7c1281160 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.924354] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697835, 'name': CreateSnapshot_Task, 'duration_secs': 0.997358} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.924559] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1564.925368] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165c3700-590a-4abc-8657-872562ed274b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.933560] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0f8d9b-faca-430c-b534-f8915dcf348d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.949169] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66655704-c1e3-4bad-b6a4-c208f911e032 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.990793] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ed40e4-f179-49d2-b412-86ee2852650f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.000010] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8f34af-326d-4be1-81ab-ba8a9eaaf23d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.016401] env[63297]: DEBUG nova.compute.provider_tree [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1565.151236] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "fba9040d-f904-44a1-8785-14d4696ea939" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.151236] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "fba9040d-f904-44a1-8785-14d4696ea939" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.151236] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "fba9040d-f904-44a1-8785-14d4696ea939-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.151236] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "fba9040d-f904-44a1-8785-14d4696ea939-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.151236] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "fba9040d-f904-44a1-8785-14d4696ea939-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.154028] env[63297]: INFO nova.compute.manager [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Terminating instance [ 1565.156217] env[63297]: DEBUG nova.compute.manager [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1565.156377] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1565.157234] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8e603a-4552-4906-8aa4-9ed169612fe7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.170539] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1565.170821] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-efa3fda7-38d7-4945-8744-c648005eeba2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.180868] env[63297]: DEBUG oslo_vmware.api [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1565.180868] env[63297]: value = "task-1697836" [ 1565.180868] env[63297]: _type = "Task" [ 1565.180868] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.194222] env[63297]: DEBUG oslo_vmware.api [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697836, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.361140] env[63297]: DEBUG nova.compute.manager [req-c624f528-1908-43db-94d7-fbd1b5c40776 req-e2828668-1c0d-4987-bbc0-9e5cd4dc65f8 service nova] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Received event network-vif-plugged-a6bfaa78-84a9-413e-b35f-f3d7485fed94 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1565.361394] env[63297]: DEBUG oslo_concurrency.lockutils [req-c624f528-1908-43db-94d7-fbd1b5c40776 req-e2828668-1c0d-4987-bbc0-9e5cd4dc65f8 service nova] Acquiring lock "be532612-7192-4771-a3dc-25bd1dc6be6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.361611] env[63297]: DEBUG oslo_concurrency.lockutils [req-c624f528-1908-43db-94d7-fbd1b5c40776 req-e2828668-1c0d-4987-bbc0-9e5cd4dc65f8 service nova] Lock "be532612-7192-4771-a3dc-25bd1dc6be6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.361780] env[63297]: DEBUG oslo_concurrency.lockutils [req-c624f528-1908-43db-94d7-fbd1b5c40776 req-e2828668-1c0d-4987-bbc0-9e5cd4dc65f8 service nova] Lock "be532612-7192-4771-a3dc-25bd1dc6be6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.361952] env[63297]: DEBUG nova.compute.manager [req-c624f528-1908-43db-94d7-fbd1b5c40776 req-e2828668-1c0d-4987-bbc0-9e5cd4dc65f8 service nova] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] No waiting events found dispatching network-vif-plugged-a6bfaa78-84a9-413e-b35f-f3d7485fed94 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1565.362137] env[63297]: WARNING nova.compute.manager [req-c624f528-1908-43db-94d7-fbd1b5c40776 req-e2828668-1c0d-4987-bbc0-9e5cd4dc65f8 service nova] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Received unexpected event network-vif-plugged-a6bfaa78-84a9-413e-b35f-f3d7485fed94 for instance with vm_state building and task_state spawning. 
[ 1565.459241] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1565.459622] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-365ffb14-a67f-4d78-a3bc-40134dfcc9f6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.475770] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1565.475770] env[63297]: value = "task-1697837" [ 1565.475770] env[63297]: _type = "Task" [ 1565.475770] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.485326] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697837, 'name': CloneVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.520212] env[63297]: DEBUG nova.scheduler.client.report [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1565.643018] env[63297]: DEBUG nova.network.neutron [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Successfully updated port: a6bfaa78-84a9-413e-b35f-f3d7485fed94 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1565.692763] env[63297]: DEBUG oslo_vmware.api [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697836, 'name': PowerOffVM_Task, 'duration_secs': 0.405074} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.693069] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1565.693251] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1565.693541] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dadb03f7-d179-4364-831b-7926f8a28026 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.988685] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697837, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.026157] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.026381] env[63297]: DEBUG nova.compute.manager [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1566.029364] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.073s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.031316] env[63297]: INFO nova.compute.claims [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1566.111218] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1566.111480] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1566.111673] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Deleting the datastore file [datastore1] fba9040d-f904-44a1-8785-14d4696ea939 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1566.111948] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8c1fba8-b6a6-451d-929b-5441493de5df {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.122515] env[63297]: DEBUG oslo_vmware.api [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for the task: (returnval){ [ 1566.122515] env[63297]: value = "task-1697839" [ 1566.122515] env[63297]: _type = "Task" [ 1566.122515] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.133910] env[63297]: DEBUG oslo_vmware.api [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697839, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.143466] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Acquiring lock "refresh_cache-be532612-7192-4771-a3dc-25bd1dc6be6b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.143466] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Acquired lock "refresh_cache-be532612-7192-4771-a3dc-25bd1dc6be6b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.143613] env[63297]: DEBUG nova.network.neutron [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1566.489107] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697837, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.519794] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquiring lock "6c1aa85a-ee37-461b-ad8a-7fbb525e836e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.520037] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Lock "6c1aa85a-ee37-461b-ad8a-7fbb525e836e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.535136] env[63297]: DEBUG nova.compute.utils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1566.536423] env[63297]: DEBUG nova.compute.manager [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1566.536561] env[63297]: DEBUG nova.network.neutron [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1566.578032] env[63297]: DEBUG nova.policy [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20a91144677b4efba8ab91acd53d1c04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c33733e0599840618625ecb3e6bb6029', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1566.634164] env[63297]: DEBUG oslo_vmware.api [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Task: {'id': task-1697839, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18544} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.634478] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1566.634660] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1566.634844] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1566.635409] env[63297]: INFO nova.compute.manager [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Took 1.48 seconds to destroy the instance on the hypervisor. [ 1566.635409] env[63297]: DEBUG oslo.service.loopingcall [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1566.635557] env[63297]: DEBUG nova.compute.manager [-] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1566.635557] env[63297]: DEBUG nova.network.neutron [-] [instance: fba9040d-f904-44a1-8785-14d4696ea939] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1566.713302] env[63297]: DEBUG nova.network.neutron [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1566.954243] env[63297]: DEBUG nova.network.neutron [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Updating instance_info_cache with network_info: [{"id": "a6bfaa78-84a9-413e-b35f-f3d7485fed94", "address": "fa:16:3e:17:c4:90", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6bfaa78-84", "ovs_interfaceid": "a6bfaa78-84a9-413e-b35f-f3d7485fed94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.991243] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697837, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.022561] env[63297]: DEBUG nova.compute.manager [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1567.039964] env[63297]: DEBUG nova.compute.manager [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1567.345705] env[63297]: DEBUG nova.network.neutron [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Successfully created port: 50e51b32-9790-4358-80e3-22b2274beca1 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1567.456819] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Releasing lock "refresh_cache-be532612-7192-4771-a3dc-25bd1dc6be6b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.457167] env[63297]: DEBUG nova.compute.manager [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Instance network_info: |[{"id": "a6bfaa78-84a9-413e-b35f-f3d7485fed94", "address": "fa:16:3e:17:c4:90", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6bfaa78-84", "ovs_interfaceid": "a6bfaa78-84a9-413e-b35f-f3d7485fed94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1567.457838] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:c4:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6bfaa78-84a9-413e-b35f-f3d7485fed94', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1567.466323] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Creating folder: Project (c19fb23a8e7c4651bca64357acf03ea7). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1567.467764] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32f6b039-7da4-440f-8ecb-5a405f29faea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.470937] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4328202-781f-4b68-b6a8-a1417aa8037f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.477703] env[63297]: DEBUG nova.compute.manager [req-c5eefae1-9959-4699-ab6c-c3d1cc8c469b req-6fd6cf9a-db19-450a-b137-216f10031c4c service nova] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Received event network-changed-a6bfaa78-84a9-413e-b35f-f3d7485fed94 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1567.478901] env[63297]: DEBUG nova.compute.manager [req-c5eefae1-9959-4699-ab6c-c3d1cc8c469b req-6fd6cf9a-db19-450a-b137-216f10031c4c service nova] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Refreshing instance network info cache due to event network-changed-a6bfaa78-84a9-413e-b35f-f3d7485fed94. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1567.478901] env[63297]: DEBUG oslo_concurrency.lockutils [req-c5eefae1-9959-4699-ab6c-c3d1cc8c469b req-6fd6cf9a-db19-450a-b137-216f10031c4c service nova] Acquiring lock "refresh_cache-be532612-7192-4771-a3dc-25bd1dc6be6b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.478901] env[63297]: DEBUG oslo_concurrency.lockutils [req-c5eefae1-9959-4699-ab6c-c3d1cc8c469b req-6fd6cf9a-db19-450a-b137-216f10031c4c service nova] Acquired lock "refresh_cache-be532612-7192-4771-a3dc-25bd1dc6be6b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.478901] env[63297]: DEBUG nova.network.neutron [req-c5eefae1-9959-4699-ab6c-c3d1cc8c469b req-6fd6cf9a-db19-450a-b137-216f10031c4c service nova] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Refreshing network info cache for port a6bfaa78-84a9-413e-b35f-f3d7485fed94 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1567.496105] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9a3238-1e77-4e76-b5f3-b01ab1a5af4f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.500874] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Created folder: Project (c19fb23a8e7c4651bca64357acf03ea7) in parent group-v353718. [ 1567.500980] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Creating folder: Instances. Parent ref: group-v353952. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1567.502118] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c410a30-02a5-4aaa-b20a-ecdd2b8b81f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.515313] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697837, 'name': CloneVM_Task, 'duration_secs': 2.027313} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.547548] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Created linked-clone VM from snapshot [ 1567.555376] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe9a2b8-cdd7-4b7a-ba15-2cef69017871 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.561817] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c86788-082b-4da1-b5d5-0ca895f47baa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.565143] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Created folder: Instances in parent group-v353952. [ 1567.565355] env[63297]: DEBUG oslo.service.loopingcall [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1567.566437] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1567.567289] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-befbaabf-cf5b-4256-b71e-943af7060cda {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.589096] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.589815] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Uploading image ff925357-5cb8-4000-8dad-20b2ec00218b {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1567.594333] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5282d543-125a-458a-98b1-84e11f92b4bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.600873] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1567.600873] env[63297]: value = "task-1697842" [ 1567.600873] env[63297]: _type = "Task" [ 1567.600873] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.612414] env[63297]: DEBUG nova.compute.provider_tree [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1567.620280] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697842, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.639724] env[63297]: DEBUG oslo_vmware.rw_handles [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1567.639724] env[63297]: value = "vm-353951" [ 1567.639724] env[63297]: _type = "VirtualMachine" [ 1567.639724] env[63297]: }. 
{{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1567.640539] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9fbee93b-71bd-4ce2-a8ce-09374a090980 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.648042] env[63297]: DEBUG oslo_vmware.rw_handles [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lease: (returnval){ [ 1567.648042] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ceabbd-28b4-d8a9-36ce-f28f08b92962" [ 1567.648042] env[63297]: _type = "HttpNfcLease" [ 1567.648042] env[63297]: } obtained for exporting VM: (result){ [ 1567.648042] env[63297]: value = "vm-353951" [ 1567.648042] env[63297]: _type = "VirtualMachine" [ 1567.648042] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1567.648371] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the lease: (returnval){ [ 1567.648371] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ceabbd-28b4-d8a9-36ce-f28f08b92962" [ 1567.648371] env[63297]: _type = "HttpNfcLease" [ 1567.648371] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1567.658592] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1567.658592] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ceabbd-28b4-d8a9-36ce-f28f08b92962" [ 1567.658592] env[63297]: _type = "HttpNfcLease" [ 1567.658592] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1567.832354] env[63297]: DEBUG nova.network.neutron [-] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.067836] env[63297]: DEBUG nova.compute.manager [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1568.093249] env[63297]: DEBUG nova.virt.hardware [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1568.093532] env[63297]: DEBUG nova.virt.hardware [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1568.093701] env[63297]: DEBUG nova.virt.hardware [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1568.093906] env[63297]: DEBUG nova.virt.hardware [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1568.094070] env[63297]: DEBUG nova.virt.hardware [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1568.094219] env[63297]: DEBUG nova.virt.hardware [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1568.094427] env[63297]: DEBUG nova.virt.hardware [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1568.094580] env[63297]: DEBUG nova.virt.hardware [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1568.094745] env[63297]: DEBUG nova.virt.hardware [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 
tempest-ServersTestJSON-1972465365-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1568.094907] env[63297]: DEBUG nova.virt.hardware [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1568.095090] env[63297]: DEBUG nova.virt.hardware [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1568.095964] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5086a2c-71ae-4a4d-9c95-1ab802eab7df {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.110080] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224973d6-4ee7-415a-b022-fdf00e41bf51 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.117943] env[63297]: DEBUG nova.scheduler.client.report [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1568.121475] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697842, 'name': CreateVM_Task, 'duration_secs': 0.414166} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.125022] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1568.125022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.125022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.125022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1568.125022] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff008aad-e1de-4508-9f3b-fc14e205f62c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.138044] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Waiting for the task: (returnval){ [ 1568.138044] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526b7dba-aa2d-2b50-4bff-06d954c013a3" [ 1568.138044] env[63297]: _type = "Task" [ 1568.138044] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.156154] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526b7dba-aa2d-2b50-4bff-06d954c013a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.160685] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1568.160685] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ceabbd-28b4-d8a9-36ce-f28f08b92962" [ 1568.160685] env[63297]: _type = "HttpNfcLease" [ 1568.160685] env[63297]: } is ready. 
{{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1568.160685] env[63297]: DEBUG oslo_vmware.rw_handles [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1568.160685] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ceabbd-28b4-d8a9-36ce-f28f08b92962" [ 1568.160685] env[63297]: _type = "HttpNfcLease" [ 1568.160685] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1568.161597] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04d9f75-1d11-46ff-8f2d-7d826d501dd8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.171159] env[63297]: DEBUG oslo_vmware.rw_handles [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5286a118-80b2-1ea8-2966-555bbbe41dd3/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1568.171343] env[63297]: DEBUG oslo_vmware.rw_handles [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5286a118-80b2-1ea8-2966-555bbbe41dd3/disk-0.vmdk for reading. {{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1568.286270] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b954f62e-036d-4721-99c9-b357d700d516 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.299214] env[63297]: DEBUG nova.network.neutron [req-c5eefae1-9959-4699-ab6c-c3d1cc8c469b req-6fd6cf9a-db19-450a-b137-216f10031c4c service nova] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Updated VIF entry in instance network info cache for port a6bfaa78-84a9-413e-b35f-f3d7485fed94. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1568.299214] env[63297]: DEBUG nova.network.neutron [req-c5eefae1-9959-4699-ab6c-c3d1cc8c469b req-6fd6cf9a-db19-450a-b137-216f10031c4c service nova] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Updating instance_info_cache with network_info: [{"id": "a6bfaa78-84a9-413e-b35f-f3d7485fed94", "address": "fa:16:3e:17:c4:90", "network": {"id": "dc2f9bbb-69d1-4460-975e-9ea52dacb7a9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "766dfc413a924229a87b04aa69e3d966", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6bfaa78-84", "ovs_interfaceid": "a6bfaa78-84a9-413e-b35f-f3d7485fed94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.339904] env[63297]: INFO nova.compute.manager [-] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Took 1.70 seconds to deallocate network for instance. [ 1568.625533] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.626852] env[63297]: DEBUG nova.compute.manager [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1568.633032] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.754s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.633032] env[63297]: INFO nova.compute.claims [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1568.653940] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526b7dba-aa2d-2b50-4bff-06d954c013a3, 'name': SearchDatastore_Task, 'duration_secs': 0.013417} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.654567] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.654881] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1568.655172] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.655326] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.655571] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1568.655876] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4d666a0-d8ac-4602-a278-d3be8dbfb120 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.666420] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1568.666617] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1568.668504] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f19fc06f-31cb-45f9-beda-74e6084b50eb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.675911] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Waiting for the task: (returnval){ [ 1568.675911] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52472cac-2874-50e4-8181-1a449e8334ef" [ 1568.675911] env[63297]: _type = "Task" [ 1568.675911] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.691999] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52472cac-2874-50e4-8181-1a449e8334ef, 'name': SearchDatastore_Task, 'duration_secs': 0.010875} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.693099] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33cd94c0-035f-42a6-97d1-ef8786c82dc1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.699673] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Waiting for the task: (returnval){ [ 1568.699673] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]525be022-4b31-1f2f-0743-f3c2a193e42b" [ 1568.699673] env[63297]: _type = "Task" [ 1568.699673] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.711165] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525be022-4b31-1f2f-0743-f3c2a193e42b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.801855] env[63297]: DEBUG oslo_concurrency.lockutils [req-c5eefae1-9959-4699-ab6c-c3d1cc8c469b req-6fd6cf9a-db19-450a-b137-216f10031c4c service nova] Releasing lock "refresh_cache-be532612-7192-4771-a3dc-25bd1dc6be6b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.802713] env[63297]: DEBUG nova.compute.manager [req-c5eefae1-9959-4699-ab6c-c3d1cc8c469b req-6fd6cf9a-db19-450a-b137-216f10031c4c service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Received event network-vif-deleted-1dd69c1e-7eee-4b1b-b4a7-421ab5477495 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1568.803708] env[63297]: INFO nova.compute.manager [req-c5eefae1-9959-4699-ab6c-c3d1cc8c469b req-6fd6cf9a-db19-450a-b137-216f10031c4c service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Neutron deleted interface 1dd69c1e-7eee-4b1b-b4a7-421ab5477495; detaching it from the instance and deleting it from the info cache [ 1568.804197] env[63297]: DEBUG nova.network.neutron [req-c5eefae1-9959-4699-ab6c-c3d1cc8c469b req-6fd6cf9a-db19-450a-b137-216f10031c4c service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.848482] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.985776] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquiring lock "cc644ecc-7340-421c-b966-19145eb82949" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.986095] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lock "cc644ecc-7340-421c-b966-19145eb82949" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.136674] env[63297]: DEBUG nova.compute.utils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1569.140780] env[63297]: DEBUG nova.compute.manager [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1569.141114] env[63297]: DEBUG nova.network.neutron [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1569.166044] env[63297]: DEBUG nova.network.neutron [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Successfully updated port: 50e51b32-9790-4358-80e3-22b2274beca1 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1569.199130] env[63297]: DEBUG nova.policy [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f80bce1510594b8a95537f814f68b2bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45448893e7ee4b8d896d1bb3f3a9ecf1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1569.212086] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525be022-4b31-1f2f-0743-f3c2a193e42b, 'name': SearchDatastore_Task, 'duration_secs': 0.010466} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.212436] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.212729] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] be532612-7192-4771-a3dc-25bd1dc6be6b/be532612-7192-4771-a3dc-25bd1dc6be6b.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1569.212982] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc913722-e6e4-4246-953d-1ef64de5a968 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.221344] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Waiting for the task: (returnval){ [ 1569.221344] env[63297]: value = "task-1697844" [ 1569.221344] env[63297]: _type = "Task" [ 1569.221344] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.232282] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697844, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.297562] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Acquiring lock "ac112251-8cc3-4f57-8983-8a07e2a068f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.298033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Lock "ac112251-8cc3-4f57-8983-8a07e2a068f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.298402] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Acquiring lock "ac112251-8cc3-4f57-8983-8a07e2a068f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.298627] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Lock "ac112251-8cc3-4f57-8983-8a07e2a068f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.298797] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Lock "ac112251-8cc3-4f57-8983-8a07e2a068f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.301337] env[63297]: INFO nova.compute.manager [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Terminating instance [ 1569.303446] env[63297]: DEBUG nova.compute.manager [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1569.303683] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1569.304546] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dabe8db-9f37-40b9-8d75-0fc962f50770 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.308301] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fcdcbedd-dc97-4683-a60c-7eaf0f79080a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.316658] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1569.318299] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24f21976-d88c-4e45-889b-2a51564285e0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.322816] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2799de-8014-4357-8709-5f55fb662248 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.340651] env[63297]: DEBUG oslo_vmware.api [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Waiting for the task: (returnval){ [ 1569.340651] env[63297]: value = "task-1697845" [ 1569.340651] env[63297]: _type = "Task" [ 1569.340651] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.349967] env[63297]: DEBUG oslo_vmware.api [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.367053] env[63297]: DEBUG nova.compute.manager [req-c5eefae1-9959-4699-ab6c-c3d1cc8c469b req-6fd6cf9a-db19-450a-b137-216f10031c4c service nova] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Detach interface failed, port_id=1dd69c1e-7eee-4b1b-b4a7-421ab5477495, reason: Instance fba9040d-f904-44a1-8785-14d4696ea939 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1569.489097] env[63297]: DEBUG nova.compute.manager [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1569.539367] env[63297]: DEBUG nova.compute.manager [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Received event network-vif-plugged-50e51b32-9790-4358-80e3-22b2274beca1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1569.539612] env[63297]: DEBUG oslo_concurrency.lockutils [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] Acquiring lock "f5866b1e-cd77-464e-858e-eb14dab0637c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.540042] env[63297]: DEBUG oslo_concurrency.lockutils [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] Lock "f5866b1e-cd77-464e-858e-eb14dab0637c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.540042] env[63297]: DEBUG oslo_concurrency.lockutils [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] Lock "f5866b1e-cd77-464e-858e-eb14dab0637c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.540042] env[63297]: DEBUG nova.compute.manager [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] No waiting events found dispatching network-vif-plugged-50e51b32-9790-4358-80e3-22b2274beca1 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1569.540263] env[63297]: WARNING nova.compute.manager [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Received unexpected event network-vif-plugged-50e51b32-9790-4358-80e3-22b2274beca1 for instance with vm_state building and task_state spawning. [ 1569.542745] env[63297]: DEBUG nova.compute.manager [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Received event network-changed-50e51b32-9790-4358-80e3-22b2274beca1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1569.542745] env[63297]: DEBUG nova.compute.manager [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Refreshing instance network info cache due to event network-changed-50e51b32-9790-4358-80e3-22b2274beca1. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1569.542745] env[63297]: DEBUG oslo_concurrency.lockutils [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] Acquiring lock "refresh_cache-f5866b1e-cd77-464e-858e-eb14dab0637c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.542745] env[63297]: DEBUG oslo_concurrency.lockutils [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] Acquired lock "refresh_cache-f5866b1e-cd77-464e-858e-eb14dab0637c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.542745] env[63297]: DEBUG nova.network.neutron [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Refreshing network info cache for port 50e51b32-9790-4358-80e3-22b2274beca1 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1569.577766] env[63297]: DEBUG nova.network.neutron [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Successfully created port: 1fc6d6cc-328f-4608-becc-2ab8cce8dc98 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1569.641865] env[63297]: DEBUG nova.compute.manager [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1569.666748] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "refresh_cache-f5866b1e-cd77-464e-858e-eb14dab0637c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.738217] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697844, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.854300] env[63297]: DEBUG oslo_vmware.api [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697845, 'name': PowerOffVM_Task, 'duration_secs': 0.272542} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.855064] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1569.855064] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1569.855249] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ad86e18-020f-4519-a521-bf2bf5493349 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.976034] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1569.976034] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1569.976034] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Deleting the datastore file [datastore1] ac112251-8cc3-4f57-8983-8a07e2a068f8 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1569.976034] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9c797c1-0973-4b94-9fee-e6d04f0e1838 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.982958] env[63297]: DEBUG oslo_vmware.api [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Waiting for the task: (returnval){ [ 1569.982958] env[63297]: value = "task-1697847" [ 1569.982958] env[63297]: _type = "Task" [ 1569.982958] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.995034] env[63297]: DEBUG oslo_vmware.api [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697847, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.019071] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.105949] env[63297]: DEBUG nova.network.neutron [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1570.145139] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fafbb8e-987a-475b-8080-7e71604afbe7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.159674] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8fda78-1656-4d82-888b-fdf9c7182a65 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.197087] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e6ba72-70d5-4157-a1e8-dc3522179f26 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.206871] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4b3d28-3b53-4f15-9731-f500ef9a693b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.225252] env[63297]: DEBUG nova.compute.provider_tree [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1570.235696] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697844, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.637602} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.236592] env[63297]: DEBUG nova.network.neutron [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.238239] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] be532612-7192-4771-a3dc-25bd1dc6be6b/be532612-7192-4771-a3dc-25bd1dc6be6b.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1570.238325] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1570.238745] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9b0f8fc-a5b6-40ba-973c-fedea09902a3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.247051] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Waiting for the task: (returnval){ [ 1570.247051] env[63297]: value = "task-1697848" [ 1570.247051] env[63297]: _type = "Task" [ 1570.247051] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.256735] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697848, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.494990] env[63297]: DEBUG oslo_vmware.api [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Task: {'id': task-1697847, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236821} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.495297] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1570.495512] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1570.495692] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1570.495871] env[63297]: INFO nova.compute.manager [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1570.496136] env[63297]: DEBUG oslo.service.loopingcall [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1570.496364] env[63297]: DEBUG nova.compute.manager [-] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1570.496454] env[63297]: DEBUG nova.network.neutron [-] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1570.660187] env[63297]: DEBUG nova.compute.manager [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1570.694037] env[63297]: DEBUG nova.virt.hardware [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1570.694340] env[63297]: DEBUG nova.virt.hardware [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1570.694504] env[63297]: DEBUG nova.virt.hardware [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1570.694728] env[63297]: DEBUG nova.virt.hardware [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1570.694882] env[63297]: DEBUG nova.virt.hardware [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1570.695244] env[63297]: DEBUG nova.virt.hardware [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1570.695540] env[63297]: DEBUG nova.virt.hardware [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1570.695733] env[63297]: DEBUG nova.virt.hardware [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1570.695907] 
env[63297]: DEBUG nova.virt.hardware [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1570.696094] env[63297]: DEBUG nova.virt.hardware [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1570.696478] env[63297]: DEBUG nova.virt.hardware [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1570.697545] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1560825f-18e9-43fb-92fc-b5a50f97510b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.706840] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8e8301-2bf2-4451-adb1-48b1f82039e2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.732057] env[63297]: DEBUG nova.scheduler.client.report [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1570.739476] env[63297]: DEBUG oslo_concurrency.lockutils [req-f35f9b0d-2edb-48da-acc7-3010273fb6e5 req-ea623661-b2ea-4b54-baa6-24f150253407 service nova] Releasing lock "refresh_cache-f5866b1e-cd77-464e-858e-eb14dab0637c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.739913] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "refresh_cache-f5866b1e-cd77-464e-858e-eb14dab0637c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.740129] env[63297]: DEBUG nova.network.neutron [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1570.761870] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] 
Task: {'id': task-1697848, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082429} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.761870] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1570.762138] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7618b2e9-5373-4ac4-8e76-0afe550e9c2a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.789969] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] be532612-7192-4771-a3dc-25bd1dc6be6b/be532612-7192-4771-a3dc-25bd1dc6be6b.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1570.790215] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ea9bb15-c496-440a-a3a5-5889cdce7b92 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.817573] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Waiting for the task: (returnval){ [ 1570.817573] env[63297]: value = "task-1697849" [ 1570.817573] env[63297]: _type = "Task" [ 1570.817573] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.828827] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697849, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.237615] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.608s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.238196] env[63297]: DEBUG nova.compute.manager [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1571.240968] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.051s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.241154] env[63297]: DEBUG nova.objects.instance [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lazy-loading 'resources' on Instance uuid 57d93827-2a5a-4f12-a74b-147a1a934dd1 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1571.295316] env[63297]: DEBUG nova.network.neutron [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1571.327830] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697849, 'name': ReconfigVM_Task, 'duration_secs': 0.301237} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.328148] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Reconfigured VM instance instance-00000054 to attach disk [datastore1] be532612-7192-4771-a3dc-25bd1dc6be6b/be532612-7192-4771-a3dc-25bd1dc6be6b.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1571.328854] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbe0849c-d699-416f-9c1d-8e38b7219c60 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.336472] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Waiting for the task: (returnval){ [ 1571.336472] env[63297]: value = "task-1697850" [ 1571.336472] env[63297]: _type = "Task" [ 1571.336472] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.345313] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697850, 'name': Rename_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.495125] env[63297]: DEBUG nova.network.neutron [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Successfully updated port: 1fc6d6cc-328f-4608-becc-2ab8cce8dc98 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1571.548385] env[63297]: DEBUG nova.network.neutron [-] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.556644] env[63297]: DEBUG nova.network.neutron [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Updating instance_info_cache with network_info: [{"id": "50e51b32-9790-4358-80e3-22b2274beca1", "address": "fa:16:3e:db:03:9e", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50e51b32-97", "ovs_interfaceid": "50e51b32-9790-4358-80e3-22b2274beca1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.575097] env[63297]: DEBUG nova.compute.manager [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Received event network-vif-deleted-20681722-92b3-46f4-bd82-1775db48a289 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1571.575097] env[63297]: DEBUG nova.compute.manager [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Received event network-vif-plugged-1fc6d6cc-328f-4608-becc-2ab8cce8dc98 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1571.575336] env[63297]: DEBUG oslo_concurrency.lockutils [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] Acquiring lock "a10df0e9-4278-48f1-b111-864ac793f630-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.575380] env[63297]: DEBUG oslo_concurrency.lockutils [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d 
service nova] Lock "a10df0e9-4278-48f1-b111-864ac793f630-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.576029] env[63297]: DEBUG oslo_concurrency.lockutils [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] Lock "a10df0e9-4278-48f1-b111-864ac793f630-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.576029] env[63297]: DEBUG nova.compute.manager [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] [instance: a10df0e9-4278-48f1-b111-864ac793f630] No waiting events found dispatching network-vif-plugged-1fc6d6cc-328f-4608-becc-2ab8cce8dc98 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1571.576029] env[63297]: WARNING nova.compute.manager [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Received unexpected event network-vif-plugged-1fc6d6cc-328f-4608-becc-2ab8cce8dc98 for instance with vm_state building and task_state spawning. [ 1571.576029] env[63297]: DEBUG nova.compute.manager [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Received event network-changed-1fc6d6cc-328f-4608-becc-2ab8cce8dc98 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1571.576243] env[63297]: DEBUG nova.compute.manager [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Refreshing instance network info cache due to event network-changed-1fc6d6cc-328f-4608-becc-2ab8cce8dc98. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1571.576366] env[63297]: DEBUG oslo_concurrency.lockutils [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] Acquiring lock "refresh_cache-a10df0e9-4278-48f1-b111-864ac793f630" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.576527] env[63297]: DEBUG oslo_concurrency.lockutils [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] Acquired lock "refresh_cache-a10df0e9-4278-48f1-b111-864ac793f630" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.577230] env[63297]: DEBUG nova.network.neutron [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Refreshing network info cache for port 1fc6d6cc-328f-4608-becc-2ab8cce8dc98 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1571.744381] env[63297]: DEBUG nova.compute.utils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1571.746135] env[63297]: DEBUG nova.compute.manager [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1571.746135] env[63297]: DEBUG nova.network.neutron [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1571.804430] env[63297]: DEBUG nova.policy [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43ab498375eb47a3923ac10343c11d34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d5cb4b4799b4b8b99648e718dbc0254', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1571.849218] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697850, 'name': Rename_Task, 'duration_secs': 0.157532} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.850026] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1571.850164] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8e4ae4f-fae0-4a05-8a43-c579a6d639f9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.856949] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Waiting for the task: (returnval){ [ 1571.856949] env[63297]: value = "task-1697851" [ 1571.856949] env[63297]: _type = "Task" [ 1571.856949] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.866853] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697851, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.997725] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "refresh_cache-a10df0e9-4278-48f1-b111-864ac793f630" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.054135] env[63297]: INFO nova.compute.manager [-] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Took 1.56 seconds to deallocate network for instance. 
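The teardown of instance ac112251-8cc3-4f57-8983-8a07e2a068f8 traced above runs PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task through oslo.vmware, blocking on each vSphere task via wait_for_task (api.py:397/434). A minimal sketch of that call pattern follows; it is not part of the captured log, and the vCenter host and credentials below are placeholders, not values from this environment.

    # Illustrative sketch only; not part of the captured log. Credentials are placeholders.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        host='vc.example.test',            # placeholder vCenter endpoint
        server_username='administrator',   # placeholder
        server_password='secret',          # placeholder
        api_retry_count=3,
        task_poll_interval=0.5)

    # Locate the VM by its Nova instance UUID, as the SearchIndex.FindAllByUuid
    # calls in the log do, then power it off and block until the task finishes.
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='ac112251-8cc3-4f57-8983-8a07e2a068f8',
        vmSearch=True, instanceUuid=True)
    poweroff_task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])
    session.wait_for_task(poweroff_task)   # polls task progress; raises if the task fails

The UnregisterVM and DeleteDatastoreFile_Task steps in the log follow the same invoke_api / wait_for_task shape.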
[ 1572.059697] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "refresh_cache-f5866b1e-cd77-464e-858e-eb14dab0637c" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.060064] env[63297]: DEBUG nova.compute.manager [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Instance network_info: |[{"id": "50e51b32-9790-4358-80e3-22b2274beca1", "address": "fa:16:3e:db:03:9e", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50e51b32-97", "ovs_interfaceid": "50e51b32-9790-4358-80e3-22b2274beca1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1572.060751] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:03:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50e51b32-9790-4358-80e3-22b2274beca1', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1572.070135] env[63297]: DEBUG oslo.service.loopingcall [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1572.074302] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1572.075473] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1206b39f-f25c-40aa-bd68-d6b506ea2292 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.105144] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1572.105144] env[63297]: value = "task-1697852" [ 1572.105144] env[63297]: _type = "Task" [ 1572.105144] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.119096] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697852, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.125657] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513c2bf0-9d5c-48b0-bb3c-dea4783e2c44 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.135596] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545cff8a-d171-42b4-9ff4-a9a64ceae058 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.179618] env[63297]: DEBUG nova.network.neutron [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1572.182727] env[63297]: DEBUG nova.network.neutron [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Successfully created port: 6965237d-5263-4cc5-b1ac-d89a3ac02360 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1572.185711] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c733df4d-a5da-48ab-affe-51b572957175 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.198016] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7892f6f-b6b0-4c01-b2e6-4d8acf50026e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.216904] env[63297]: DEBUG nova.compute.provider_tree [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1572.251548] env[63297]: DEBUG nova.compute.manager [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1572.369666] env[63297]: DEBUG oslo_vmware.api [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697851, 'name': PowerOnVM_Task, 'duration_secs': 0.488403} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.370025] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1572.370536] env[63297]: INFO nova.compute.manager [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Took 7.80 seconds to spawn the instance on the hypervisor. 
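The recurring Acquiring/Acquired/Releasing lock lines in this trace ("compute_resources", "refresh_cache-<uuid>", the per-instance "-events" locks) come from oslo.concurrency's lockutils. A minimal sketch of that locking pattern is below; refresh_network_info_cache is a hypothetical stand-in, not a real Nova function.

    # Illustrative sketch only; refresh_network_info_cache is a hypothetical stand-in.
    from oslo_concurrency import lockutils

    instance_uuid = 'f5866b1e-cd77-464e-858e-eb14dab0637c'  # taken from the log above

    def refresh_network_info_cache(uuid):
        print('refreshing network info cache for %s' % uuid)

    # Serialize cache refreshes per instance, mirroring the
    # 'Acquiring lock "refresh_cache-<uuid>"' / 'Releasing lock' pairs above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        refresh_network_info_cache(instance_uuid)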
[ 1572.370627] env[63297]: DEBUG nova.compute.manager [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1572.371563] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6157469-1c4e-40d1-b14f-85c243216fba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.375706] env[63297]: DEBUG nova.network.neutron [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.562603] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.617260] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697852, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.721091] env[63297]: DEBUG nova.scheduler.client.report [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1572.882751] env[63297]: DEBUG oslo_concurrency.lockutils [req-a13f214c-6898-4c8d-9fb8-79759739632a req-ec0a6bcb-2d2d-4228-b182-11fad45ba10d service nova] Releasing lock "refresh_cache-a10df0e9-4278-48f1-b111-864ac793f630" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.887160] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "refresh_cache-a10df0e9-4278-48f1-b111-864ac793f630" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.887344] env[63297]: DEBUG nova.network.neutron [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1572.890422] env[63297]: INFO nova.compute.manager [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 
tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Took 22.66 seconds to build instance. [ 1573.117932] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697852, 'name': CreateVM_Task, 'duration_secs': 0.780548} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.118244] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1573.118998] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.119267] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.119686] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1573.119989] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddf4260f-c02d-4de3-920a-840e544caa06 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.125377] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1573.125377] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d4b220-1c84-7684-79a2-1025f8dafba1" [ 1573.125377] env[63297]: _type = "Task" [ 1573.125377] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.134549] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d4b220-1c84-7684-79a2-1025f8dafba1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.226957] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.986s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.229339] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.661s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.230896] env[63297]: INFO nova.compute.claims [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1573.253732] env[63297]: INFO nova.scheduler.client.report [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted allocations for instance 57d93827-2a5a-4f12-a74b-147a1a934dd1 [ 1573.261509] env[63297]: DEBUG nova.compute.manager [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1573.290256] env[63297]: DEBUG nova.virt.hardware [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='f2d41f3a536be15ff855267d866e87a8',container_format='bare',created_at=2024-12-10T17:28:08Z,direct_url=,disk_format='vmdk',id=dc932a9e-d404-4f38-8fb5-26f5f3f4b879,min_disk=1,min_ram=0,name='tempest-test-snap-374453168',owner='2d5cb4b4799b4b8b99648e718dbc0254',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-10T17:28:27Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1573.290556] env[63297]: DEBUG nova.virt.hardware [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1573.290751] env[63297]: DEBUG nova.virt.hardware [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1573.290971] env[63297]: DEBUG nova.virt.hardware [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1573.291166] env[63297]: DEBUG nova.virt.hardware [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1573.291349] env[63297]: DEBUG nova.virt.hardware [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1573.291682] env[63297]: DEBUG nova.virt.hardware [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1573.291751] env[63297]: DEBUG nova.virt.hardware [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1573.291884] env[63297]: DEBUG nova.virt.hardware [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Got 1 possible topologies 
{{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1573.292133] env[63297]: DEBUG nova.virt.hardware [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1573.292422] env[63297]: DEBUG nova.virt.hardware [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1573.293545] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b38d472-67be-451a-a25a-6212e2426419 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.302631] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919aaedf-e46b-4f32-8173-a8945915ace3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.393505] env[63297]: DEBUG oslo_concurrency.lockutils [None req-739ca28c-4363-43c8-976b-f9cdebed3cf8 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Lock "be532612-7192-4771-a3dc-25bd1dc6be6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.175s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.433673] env[63297]: DEBUG nova.network.neutron [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1573.590852] env[63297]: DEBUG nova.network.neutron [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Updating instance_info_cache with network_info: [{"id": "1fc6d6cc-328f-4608-becc-2ab8cce8dc98", "address": "fa:16:3e:2c:c8:c0", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fc6d6cc-32", "ovs_interfaceid": "1fc6d6cc-328f-4608-becc-2ab8cce8dc98", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.640242] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d4b220-1c84-7684-79a2-1025f8dafba1, 'name': SearchDatastore_Task, 'duration_secs': 0.01089} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.640617] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.640875] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1573.641134] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.641286] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.641464] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1573.642017] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86ba00bc-9a05-431b-b56f-1b78baa6085e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.654530] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1573.654730] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1573.655507] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11d181f2-88bc-41c7-97d0-06133f818067 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.663015] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1573.663015] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f320ab-b7e5-58b2-66ed-47f5447037b0" [ 1573.663015] env[63297]: _type = "Task" [ 1573.663015] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.671763] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f320ab-b7e5-58b2-66ed-47f5447037b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.684439] env[63297]: DEBUG nova.compute.manager [None req-2022dca0-3ed4-4901-b446-b31b37ba4a44 tempest-ServerDiagnosticsTest-1286251442 tempest-ServerDiagnosticsTest-1286251442-project-admin] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1573.685577] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6876686e-6b1f-4268-9378-d00528bec773 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.694064] env[63297]: INFO nova.compute.manager [None req-2022dca0-3ed4-4901-b446-b31b37ba4a44 tempest-ServerDiagnosticsTest-1286251442 tempest-ServerDiagnosticsTest-1286251442-project-admin] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Retrieving diagnostics [ 1573.694522] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d98835-51e2-4fa7-9016-d8374c84498e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.762874] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d10c6581-dbfc-4b28-ab19-c5aae3365314 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "57d93827-2a5a-4f12-a74b-147a1a934dd1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.544s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.852265] env[63297]: DEBUG nova.network.neutron [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Successfully updated port: 6965237d-5263-4cc5-b1ac-d89a3ac02360 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1573.859908] env[63297]: DEBUG nova.compute.manager [req-890805c2-6dc4-4ada-b072-ed59e211d1cf req-3d8110cc-4428-4d4b-bb86-66a3ff55ac65 service nova] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Received event 
network-vif-plugged-6965237d-5263-4cc5-b1ac-d89a3ac02360 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1573.860145] env[63297]: DEBUG oslo_concurrency.lockutils [req-890805c2-6dc4-4ada-b072-ed59e211d1cf req-3d8110cc-4428-4d4b-bb86-66a3ff55ac65 service nova] Acquiring lock "310cf8d4-613a-4c35-b118-7d79138e4799-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.860355] env[63297]: DEBUG oslo_concurrency.lockutils [req-890805c2-6dc4-4ada-b072-ed59e211d1cf req-3d8110cc-4428-4d4b-bb86-66a3ff55ac65 service nova] Lock "310cf8d4-613a-4c35-b118-7d79138e4799-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.860610] env[63297]: DEBUG oslo_concurrency.lockutils [req-890805c2-6dc4-4ada-b072-ed59e211d1cf req-3d8110cc-4428-4d4b-bb86-66a3ff55ac65 service nova] Lock "310cf8d4-613a-4c35-b118-7d79138e4799-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.860795] env[63297]: DEBUG nova.compute.manager [req-890805c2-6dc4-4ada-b072-ed59e211d1cf req-3d8110cc-4428-4d4b-bb86-66a3ff55ac65 service nova] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] No waiting events found dispatching network-vif-plugged-6965237d-5263-4cc5-b1ac-d89a3ac02360 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1573.860962] env[63297]: WARNING nova.compute.manager [req-890805c2-6dc4-4ada-b072-ed59e211d1cf req-3d8110cc-4428-4d4b-bb86-66a3ff55ac65 service nova] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Received unexpected event network-vif-plugged-6965237d-5263-4cc5-b1ac-d89a3ac02360 for instance with vm_state building and task_state spawning. 
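[annotation] The records above show the external-event path: Neutron reports network-vif-plugged-6965237d-... for instance 310cf8d4-..., the service thread takes the per-instance "<uuid>-events" lock, pops any registered waiter via InstanceEvents.pop_instance_event, and, since the instance is still building/spawning and nothing is waiting yet, logs the "Received unexpected event" WARNING. The following is a minimal, self-contained Python sketch of that pattern; it is an illustration only, not Nova's actual implementation, and the class/function names below are simplified stand-ins for the code paths named in the log.

    import logging
    import threading

    LOG = logging.getLogger(__name__)

    class InstanceEvents:
        """Simplified stand-in for nova.compute.manager.InstanceEvents."""

        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
            self._events = {}               # {instance_uuid: {event_name: waiter}}

        def prepare_for_instance_event(self, instance_uuid, event_name):
            # Registered by the code path that expects the event (e.g. while plugging VIFs).
            waiter = threading.Event()
            with self._lock:
                self._events.setdefault(instance_uuid, {})[event_name] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            # Called when an external event such as "network-vif-plugged-<port-id>"
            # arrives; returns the registered waiter, or None if nobody is waiting.
            with self._lock:
                return self._events.get(instance_uuid, {}).pop(event_name, None)

    def handle_external_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # Corresponds to the WARNING above: no waiter was registered because the
            # instance is still in vm_state "building" / task_state "spawning".
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
        else:
            waiter.set()  # wake the thread blocked waiting for the VIF to be plugged

[end annotation]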
[ 1574.093578] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "refresh_cache-a10df0e9-4278-48f1-b111-864ac793f630" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.093961] env[63297]: DEBUG nova.compute.manager [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Instance network_info: |[{"id": "1fc6d6cc-328f-4608-becc-2ab8cce8dc98", "address": "fa:16:3e:2c:c8:c0", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fc6d6cc-32", "ovs_interfaceid": "1fc6d6cc-328f-4608-becc-2ab8cce8dc98", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1574.094232] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:c8:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1fc6d6cc-328f-4608-becc-2ab8cce8dc98', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1574.102739] env[63297]: DEBUG oslo.service.loopingcall [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1574.102986] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1574.103233] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92105575-6c2c-44b0-a42c-633c65a60b6f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.124929] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1574.124929] env[63297]: value = "task-1697853" [ 1574.124929] env[63297]: _type = "Task" [ 1574.124929] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.133649] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697853, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.176442] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f320ab-b7e5-58b2-66ed-47f5447037b0, 'name': SearchDatastore_Task, 'duration_secs': 0.01027} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.177366] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48c1b02c-5b66-4d45-b036-7770e3b9d8c7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.183844] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1574.183844] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52330712-6ef2-f7cd-338c-fc391640ce69" [ 1574.183844] env[63297]: _type = "Task" [ 1574.183844] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.194097] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52330712-6ef2-f7cd-338c-fc391640ce69, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.355483] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "refresh_cache-310cf8d4-613a-4c35-b118-7d79138e4799" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.356035] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "refresh_cache-310cf8d4-613a-4c35-b118-7d79138e4799" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.356035] env[63297]: DEBUG nova.network.neutron [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1574.559775] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfd205a-8d21-44f6-adbe-04a6322308a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.568021] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b565fd61-8365-4b3c-abaa-63955cc089c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.598992] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fc8771-638e-4f7e-a736-198f9a111976 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.606529] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d61403-6684-4189-b797-fe617338a9e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.620037] env[63297]: DEBUG nova.compute.provider_tree [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1574.634900] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697853, 'name': CreateVM_Task, 'duration_secs': 0.390681} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.635086] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1574.635702] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.635863] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.636204] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1574.636470] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22157123-255c-47f3-a133-d43830976eb9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.641083] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1574.641083] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f13a36-b1e0-1b54-b605-f438adfb422d" [ 1574.641083] env[63297]: _type = "Task" [ 1574.641083] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.649364] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f13a36-b1e0-1b54-b605-f438adfb422d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.695972] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52330712-6ef2-f7cd-338c-fc391640ce69, 'name': SearchDatastore_Task, 'duration_secs': 0.010504} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.696271] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.697019] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] f5866b1e-cd77-464e-858e-eb14dab0637c/f5866b1e-cd77-464e-858e-eb14dab0637c.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1574.697019] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a442bddf-25fc-41bf-97a9-0593d863bb31 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.705319] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1574.705319] env[63297]: value = "task-1697854" [ 1574.705319] env[63297]: _type = "Task" [ 1574.705319] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.714350] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697854, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.760169] env[63297]: DEBUG oslo_concurrency.lockutils [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Acquiring lock "be532612-7192-4771-a3dc-25bd1dc6be6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.760505] env[63297]: DEBUG oslo_concurrency.lockutils [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Lock "be532612-7192-4771-a3dc-25bd1dc6be6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.760842] env[63297]: DEBUG oslo_concurrency.lockutils [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Acquiring lock "be532612-7192-4771-a3dc-25bd1dc6be6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.761072] env[63297]: DEBUG oslo_concurrency.lockutils [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Lock "be532612-7192-4771-a3dc-25bd1dc6be6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.761308] env[63297]: DEBUG oslo_concurrency.lockutils [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Lock "be532612-7192-4771-a3dc-25bd1dc6be6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.763682] env[63297]: INFO nova.compute.manager [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Terminating instance [ 1574.765801] env[63297]: DEBUG nova.compute.manager [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1574.766043] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1574.767100] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ed94a6-e441-4bd3-a65c-60b9599757ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.777123] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1574.777450] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c05c3c23-f0fa-4635-9b4a-53ac3b63ec13 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.785349] env[63297]: DEBUG oslo_vmware.api [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Waiting for the task: (returnval){ [ 1574.785349] env[63297]: value = "task-1697855" [ 1574.785349] env[63297]: _type = "Task" [ 1574.785349] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.796787] env[63297]: DEBUG oslo_vmware.api [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697855, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.911465] env[63297]: DEBUG nova.network.neutron [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1575.092735] env[63297]: DEBUG nova.network.neutron [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Updating instance_info_cache with network_info: [{"id": "6965237d-5263-4cc5-b1ac-d89a3ac02360", "address": "fa:16:3e:01:15:5e", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6965237d-52", "ovs_interfaceid": "6965237d-5263-4cc5-b1ac-d89a3ac02360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1575.123369] env[63297]: DEBUG nova.scheduler.client.report [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1575.153418] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f13a36-b1e0-1b54-b605-f438adfb422d, 'name': SearchDatastore_Task, 'duration_secs': 0.010171} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.153769] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1575.154018] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1575.154281] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.154420] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.154605] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1575.154909] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cf38b32-1b81-4d52-b285-9f2a5738285b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.215909] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697854, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.297313] env[63297]: DEBUG oslo_vmware.api [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697855, 'name': PowerOffVM_Task, 'duration_secs': 0.264133} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.297586] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1575.297759] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1575.298081] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-487f8716-3c89-4479-a01c-bb59ccfb8cc0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.300999] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1575.301214] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1575.302424] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96c51b5c-9c7a-47d8-bcae-3f7c74a02180 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.309601] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1575.309601] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e29997-b6d1-aee3-9189-ab94e35d83eb" [ 1575.309601] env[63297]: _type = "Task" [ 1575.309601] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.319444] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e29997-b6d1-aee3-9189-ab94e35d83eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.389881] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1575.390265] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1575.390385] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Deleting the datastore file [datastore1] be532612-7192-4771-a3dc-25bd1dc6be6b {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1575.390710] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f059000-0444-4d40-84a6-877127fd392b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.398313] env[63297]: DEBUG oslo_vmware.api [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Waiting for the task: (returnval){ [ 1575.398313] env[63297]: value = "task-1697857" [ 1575.398313] env[63297]: _type = "Task" [ 1575.398313] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.408052] env[63297]: DEBUG oslo_vmware.api [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697857, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.595764] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "refresh_cache-310cf8d4-613a-4c35-b118-7d79138e4799" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1575.596146] env[63297]: DEBUG nova.compute.manager [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Instance network_info: |[{"id": "6965237d-5263-4cc5-b1ac-d89a3ac02360", "address": "fa:16:3e:01:15:5e", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6965237d-52", "ovs_interfaceid": "6965237d-5263-4cc5-b1ac-d89a3ac02360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1575.596655] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:15:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6965237d-5263-4cc5-b1ac-d89a3ac02360', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1575.605059] env[63297]: DEBUG oslo.service.loopingcall [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1575.605775] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1575.605775] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e81f0f4-c558-46ee-a2cb-1ebf0707cb1b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.627144] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1575.627144] env[63297]: value = "task-1697858" [ 1575.627144] env[63297]: _type = "Task" [ 1575.627144] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.630885] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.402s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.631398] env[63297]: DEBUG nova.compute.manager [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1575.633905] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.386s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.635387] env[63297]: INFO nova.compute.claims [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1575.644098] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697858, 'name': CreateVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.717094] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697854, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.821798] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e29997-b6d1-aee3-9189-ab94e35d83eb, 'name': SearchDatastore_Task, 'duration_secs': 0.01341} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.822764] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77e5512e-80fc-48c3-9add-daa622f1240b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.829700] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1575.829700] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52723606-a40c-5374-34a5-a70a1b64cf7c" [ 1575.829700] env[63297]: _type = "Task" [ 1575.829700] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.838588] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52723606-a40c-5374-34a5-a70a1b64cf7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.908890] env[63297]: DEBUG oslo_vmware.api [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Task: {'id': task-1697857, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267887} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.909221] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1575.909495] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1575.913019] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1575.913019] env[63297]: INFO nova.compute.manager [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1575.913019] env[63297]: DEBUG oslo.service.loopingcall [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1575.913019] env[63297]: DEBUG nova.compute.manager [-] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1575.913019] env[63297]: DEBUG nova.network.neutron [-] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1575.917621] env[63297]: DEBUG nova.compute.manager [req-0440d128-89d0-49b0-8621-4990f570adbc req-e0c3c980-2b47-4d00-8419-923b403216f8 service nova] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Received event network-changed-6965237d-5263-4cc5-b1ac-d89a3ac02360 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1575.917792] env[63297]: DEBUG nova.compute.manager [req-0440d128-89d0-49b0-8621-4990f570adbc req-e0c3c980-2b47-4d00-8419-923b403216f8 service nova] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Refreshing instance network info cache due to event network-changed-6965237d-5263-4cc5-b1ac-d89a3ac02360. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1575.918836] env[63297]: DEBUG oslo_concurrency.lockutils [req-0440d128-89d0-49b0-8621-4990f570adbc req-e0c3c980-2b47-4d00-8419-923b403216f8 service nova] Acquiring lock "refresh_cache-310cf8d4-613a-4c35-b118-7d79138e4799" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.918836] env[63297]: DEBUG oslo_concurrency.lockutils [req-0440d128-89d0-49b0-8621-4990f570adbc req-e0c3c980-2b47-4d00-8419-923b403216f8 service nova] Acquired lock "refresh_cache-310cf8d4-613a-4c35-b118-7d79138e4799" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.918836] env[63297]: DEBUG nova.network.neutron [req-0440d128-89d0-49b0-8621-4990f570adbc req-e0c3c980-2b47-4d00-8419-923b403216f8 service nova] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Refreshing network info cache for port 6965237d-5263-4cc5-b1ac-d89a3ac02360 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1576.141075] env[63297]: DEBUG nova.compute.utils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1576.146024] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697858, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.146024] env[63297]: DEBUG nova.compute.manager [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1576.146024] env[63297]: DEBUG nova.network.neutron [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1576.215888] env[63297]: DEBUG nova.policy [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36046b24c0d4468989e10779f4baac3a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e8922c66c5647dd918c489f4f9bac73', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1576.221576] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697854, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.342310] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52723606-a40c-5374-34a5-a70a1b64cf7c, 'name': SearchDatastore_Task, 'duration_secs': 0.021099} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.342310] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.342310] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] a10df0e9-4278-48f1-b111-864ac793f630/a10df0e9-4278-48f1-b111-864ac793f630.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1576.342310] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43aad324-2622-4bc7-a095-374099d1301e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.353028] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1576.353028] env[63297]: value = "task-1697859" [ 1576.353028] env[63297]: _type = "Task" [ 1576.353028] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.359298] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697859, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.564623] env[63297]: DEBUG oslo_vmware.rw_handles [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5286a118-80b2-1ea8-2966-555bbbe41dd3/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1576.565808] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4eb093-2a04-4b13-b15f-69c0501ed41e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.582961] env[63297]: DEBUG oslo_vmware.rw_handles [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5286a118-80b2-1ea8-2966-555bbbe41dd3/disk-0.vmdk is in state: ready. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1576.583195] env[63297]: ERROR oslo_vmware.rw_handles [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5286a118-80b2-1ea8-2966-555bbbe41dd3/disk-0.vmdk due to incomplete transfer. [ 1576.583501] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6ad72261-ca29-4de1-852c-4b030313a3d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.596295] env[63297]: DEBUG oslo_vmware.rw_handles [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5286a118-80b2-1ea8-2966-555bbbe41dd3/disk-0.vmdk. {{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1576.596534] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Uploaded image ff925357-5cb8-4000-8dad-20b2ec00218b to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1576.599816] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1576.604205] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-14b91f47-6289-4fff-aef2-429786470e6b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.615331] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1576.615331] env[63297]: value = "task-1697860" [ 1576.615331] env[63297]: _type = "Task" [ 1576.615331] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.625420] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697860, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.639019] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697858, 'name': CreateVM_Task, 'duration_secs': 0.520488} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.639226] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1576.639959] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.640145] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.640536] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1576.640823] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-553c9b1c-9ee6-43e3-80df-bb34829abd33 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.646883] env[63297]: DEBUG nova.compute.manager [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1576.653207] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1576.653207] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f5a272-47f0-ed89-e7a3-1031f94dff17" [ 1576.653207] env[63297]: _type = "Task" [ 1576.653207] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.666106] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f5a272-47f0-ed89-e7a3-1031f94dff17, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.676015] env[63297]: DEBUG nova.network.neutron [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Successfully created port: ec16ec14-bcee-41ea-a7de-02e85d2b1169 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1576.721567] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697854, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.632381} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.722486] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] f5866b1e-cd77-464e-858e-eb14dab0637c/f5866b1e-cd77-464e-858e-eb14dab0637c.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1576.722486] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1576.722808] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a46d6a8-4fa9-4609-9c92-f65937554f3f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.743406] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1576.743406] env[63297]: value = "task-1697861" [ 1576.743406] env[63297]: _type = "Task" [ 1576.743406] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.752207] env[63297]: DEBUG nova.network.neutron [-] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.764812] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697861, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.867050] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697859, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.908410] env[63297]: DEBUG nova.network.neutron [req-0440d128-89d0-49b0-8621-4990f570adbc req-e0c3c980-2b47-4d00-8419-923b403216f8 service nova] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Updated VIF entry in instance network info cache for port 6965237d-5263-4cc5-b1ac-d89a3ac02360. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1576.908921] env[63297]: DEBUG nova.network.neutron [req-0440d128-89d0-49b0-8621-4990f570adbc req-e0c3c980-2b47-4d00-8419-923b403216f8 service nova] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Updating instance_info_cache with network_info: [{"id": "6965237d-5263-4cc5-b1ac-d89a3ac02360", "address": "fa:16:3e:01:15:5e", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6965237d-52", "ovs_interfaceid": "6965237d-5263-4cc5-b1ac-d89a3ac02360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.050339] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81744399-62d0-4188-9dcd-5abc7d68496b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.059020] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb72dca-62ba-46bd-86de-9bb66654a593 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.103312] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a234974e-cf1d-4924-8d8d-3939ed7de66e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.112800] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e96ce2f-7984-47c0-a58a-bafdd604bc0d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.124702] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697860, 'name': Destroy_Task} progress is 33%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.132368] env[63297]: DEBUG nova.compute.provider_tree [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1577.166974] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.167372] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Processing image dc932a9e-d404-4f38-8fb5-26f5f3f4b879 {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1577.167506] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879/dc932a9e-d404-4f38-8fb5-26f5f3f4b879.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.167651] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879/dc932a9e-d404-4f38-8fb5-26f5f3f4b879.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.167833] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1577.168095] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac8642dd-8014-49a7-ace7-54974285b9b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.177388] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1577.177972] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1577.178458] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3299961-cb83-4e9f-8ac7-c1d592d9f830 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.184385] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1577.184385] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52966966-966c-6a9b-85e2-e5989b06b1ce" [ 1577.184385] env[63297]: _type = "Task" [ 1577.184385] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.192741] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52966966-966c-6a9b-85e2-e5989b06b1ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.253451] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697861, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123302} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.254079] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1577.254365] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65061767-b67c-46bb-beac-b2975e685d16 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.256933] env[63297]: INFO nova.compute.manager [-] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Took 1.35 seconds to deallocate network for instance. 
[ 1577.279823] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] f5866b1e-cd77-464e-858e-eb14dab0637c/f5866b1e-cd77-464e-858e-eb14dab0637c.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1577.280252] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a97d8b71-9827-4546-b92c-63ada9f39ca8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.300350] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1577.300350] env[63297]: value = "task-1697862" [ 1577.300350] env[63297]: _type = "Task" [ 1577.300350] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.310441] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697862, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.364389] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697859, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522986} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.364970] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] a10df0e9-4278-48f1-b111-864ac793f630/a10df0e9-4278-48f1-b111-864ac793f630.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1577.365207] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1577.365466] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cecd6540-beec-4b4f-884e-f9f8b0137958 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.372878] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1577.372878] env[63297]: value = "task-1697863" [ 1577.372878] env[63297]: _type = "Task" [ 1577.372878] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.382381] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697863, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.411172] env[63297]: DEBUG oslo_concurrency.lockutils [req-0440d128-89d0-49b0-8621-4990f570adbc req-e0c3c980-2b47-4d00-8419-923b403216f8 service nova] Releasing lock "refresh_cache-310cf8d4-613a-4c35-b118-7d79138e4799" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.626149] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697860, 'name': Destroy_Task, 'duration_secs': 0.678783} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.626407] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Destroyed the VM [ 1577.626648] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1577.626903] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7d30cfb3-acdc-4397-b5b1-2b04418de3f5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.633639] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1577.633639] env[63297]: value = "task-1697864" [ 1577.633639] env[63297]: _type = "Task" [ 1577.633639] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.637677] env[63297]: DEBUG nova.scheduler.client.report [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1577.647143] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697864, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.662272] env[63297]: DEBUG nova.compute.manager [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1577.686484] env[63297]: DEBUG nova.virt.hardware [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1577.686718] env[63297]: DEBUG nova.virt.hardware [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1577.686874] env[63297]: DEBUG nova.virt.hardware [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1577.687063] env[63297]: DEBUG nova.virt.hardware [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Flavor pref 0:0:0 {{(pid=63297) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1577.687211] env[63297]: DEBUG nova.virt.hardware [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1577.687353] env[63297]: DEBUG nova.virt.hardware [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1577.687555] env[63297]: DEBUG nova.virt.hardware [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1577.687721] env[63297]: DEBUG nova.virt.hardware [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1577.687871] env[63297]: DEBUG nova.virt.hardware [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1577.688038] env[63297]: DEBUG nova.virt.hardware [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1577.688212] env[63297]: DEBUG nova.virt.hardware [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1577.689175] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defcc5d8-38a8-437a-8e85-e8244b017636 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.702751] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Preparing fetch location {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1577.702946] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Fetch image to [datastore1] OSTACK_IMG_65ccd623-4aba-417f-aec3-ce4cb9a9c73a/OSTACK_IMG_65ccd623-4aba-417f-aec3-ce4cb9a9c73a.vmdk {{(pid=63297) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1577.703148] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Downloading stream optimized image dc932a9e-d404-4f38-8fb5-26f5f3f4b879 to [datastore1] OSTACK_IMG_65ccd623-4aba-417f-aec3-ce4cb9a9c73a/OSTACK_IMG_65ccd623-4aba-417f-aec3-ce4cb9a9c73a.vmdk on the data store datastore1 as vApp {{(pid=63297) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1577.703319] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Downloading image file data dc932a9e-d404-4f38-8fb5-26f5f3f4b879 to the ESX as VM named 'OSTACK_IMG_65ccd623-4aba-417f-aec3-ce4cb9a9c73a' {{(pid=63297) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1577.706329] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ab11ff-cc8e-4a0d-a8ef-18a8e24443ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.772517] env[63297]: DEBUG oslo_concurrency.lockutils [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.774062] env[63297]: DEBUG oslo_vmware.rw_handles [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1577.774062] env[63297]: value = "resgroup-9" [ 1577.774062] env[63297]: _type = "ResourcePool" [ 1577.774062] env[63297]: }. {{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1577.774323] env[63297]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-21a0842a-eeb7-40e1-bec6-cf63543f15c5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.798236] env[63297]: DEBUG oslo_vmware.rw_handles [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lease: (returnval){ [ 1577.798236] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b6dfba-d68e-8500-6d7d-4b97e9d25cdf" [ 1577.798236] env[63297]: _type = "HttpNfcLease" [ 1577.798236] env[63297]: } obtained for vApp import into resource pool (val){ [ 1577.798236] env[63297]: value = "resgroup-9" [ 1577.798236] env[63297]: _type = "ResourcePool" [ 1577.798236] env[63297]: }. 
{{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1577.798486] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the lease: (returnval){ [ 1577.798486] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b6dfba-d68e-8500-6d7d-4b97e9d25cdf" [ 1577.798486] env[63297]: _type = "HttpNfcLease" [ 1577.798486] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1577.810792] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1577.810792] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b6dfba-d68e-8500-6d7d-4b97e9d25cdf" [ 1577.810792] env[63297]: _type = "HttpNfcLease" [ 1577.810792] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1577.813599] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697862, 'name': ReconfigVM_Task, 'duration_secs': 0.323858} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.813850] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Reconfigured VM instance instance-00000055 to attach disk [datastore1] f5866b1e-cd77-464e-858e-eb14dab0637c/f5866b1e-cd77-464e-858e-eb14dab0637c.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1577.814516] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bfdbf1d5-11c9-4a79-bcaf-fb84e2d2eefb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.822025] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1577.822025] env[63297]: value = "task-1697866" [ 1577.822025] env[63297]: _type = "Task" [ 1577.822025] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.830683] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697866, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.883843] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697863, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.188395} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.884260] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1577.885156] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc261e4e-2a4f-49a9-93d6-321cf5433c3a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.913172] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] a10df0e9-4278-48f1-b111-864ac793f630/a10df0e9-4278-48f1-b111-864ac793f630.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1577.913568] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09562b8e-8ab7-4329-88fe-cc45533779a8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.935242] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1577.935242] env[63297]: value = "task-1697867" [ 1577.935242] env[63297]: _type = "Task" [ 1577.935242] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.944712] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697867, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.965752] env[63297]: DEBUG nova.compute.manager [req-516b2e33-b2aa-4923-a3b1-0113f061de59 req-c1aec6cc-139d-4911-a904-7d7176654041 service nova] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Received event network-vif-deleted-a6bfaa78-84a9-413e-b35f-f3d7485fed94 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1578.144598] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.511s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.145254] env[63297]: DEBUG nova.compute.manager [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1578.147973] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697864, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.148468] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.560s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.150272] env[63297]: INFO nova.compute.claims [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1578.308310] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1578.308310] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b6dfba-d68e-8500-6d7d-4b97e9d25cdf" [ 1578.308310] env[63297]: _type = "HttpNfcLease" [ 1578.308310] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1578.332402] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697866, 'name': Rename_Task, 'duration_secs': 0.225098} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.332679] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1578.332933] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d4ed709-bbe9-4f56-b09f-b71c5f7e7b2f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.340637] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1578.340637] env[63297]: value = "task-1697868" [ 1578.340637] env[63297]: _type = "Task" [ 1578.340637] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.349986] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697868, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.445673] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697867, 'name': ReconfigVM_Task, 'duration_secs': 0.401949} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.446322] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Reconfigured VM instance instance-00000056 to attach disk [datastore1] a10df0e9-4278-48f1-b111-864ac793f630/a10df0e9-4278-48f1-b111-864ac793f630.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1578.447131] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-307741bd-3b4d-4da9-ac69-ff0c1e3c914d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.454344] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1578.454344] env[63297]: value = "task-1697869" [ 1578.454344] env[63297]: _type = "Task" [ 1578.454344] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.464647] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697869, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.640190] env[63297]: DEBUG nova.network.neutron [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Successfully updated port: ec16ec14-bcee-41ea-a7de-02e85d2b1169 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1578.647761] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697864, 'name': RemoveSnapshot_Task, 'duration_secs': 0.548759} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.648070] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1578.648390] env[63297]: DEBUG nova.compute.manager [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1578.649828] env[63297]: DEBUG nova.compute.utils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1578.651652] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cc267b-8eef-4072-a94f-43a3212466fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.655020] env[63297]: DEBUG nova.compute.manager [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1578.655196] env[63297]: DEBUG nova.network.neutron [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1578.700467] env[63297]: DEBUG nova.policy [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28ea980c339244f699047893336ee663', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '54456153a472421890c889a6f2c62b38', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1578.809025] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1578.809025] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b6dfba-d68e-8500-6d7d-4b97e9d25cdf" [ 1578.809025] env[63297]: _type = "HttpNfcLease" [ 1578.809025] env[63297]: } is ready. 
{{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1578.809429] env[63297]: DEBUG oslo_vmware.rw_handles [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1578.809429] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b6dfba-d68e-8500-6d7d-4b97e9d25cdf" [ 1578.809429] env[63297]: _type = "HttpNfcLease" [ 1578.809429] env[63297]: }. {{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1578.810318] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836aeca2-cabd-46a1-b0bf-d1c89bd94de1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.819972] env[63297]: DEBUG oslo_vmware.rw_handles [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e0240c-db10-cc7b-f46e-2fc26ff406b9/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1578.820359] env[63297]: DEBUG oslo_vmware.rw_handles [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e0240c-db10-cc7b-f46e-2fc26ff406b9/disk-0.vmdk. {{(pid=63297) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1578.906662] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-dc1b0f99-846a-4591-b6f7-e618514971bc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.908757] env[63297]: DEBUG oslo_vmware.api [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697868, 'name': PowerOnVM_Task, 'duration_secs': 0.482486} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.910011] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1578.910287] env[63297]: INFO nova.compute.manager [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Took 10.84 seconds to spawn the instance on the hypervisor. 
[ 1578.910740] env[63297]: DEBUG nova.compute.manager [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1578.911839] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4419d52-a431-4e81-8cd6-63ccf372d221 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.966232] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697869, 'name': Rename_Task, 'duration_secs': 0.14867} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.966573] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1578.967359] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b816c252-b8db-4c3c-a80c-4f196a08481d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.974993] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1578.974993] env[63297]: value = "task-1697870" [ 1578.974993] env[63297]: _type = "Task" [ 1578.974993] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.983748] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697870, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.064976] env[63297]: DEBUG nova.network.neutron [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Successfully created port: 8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1579.142805] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Acquiring lock "refresh_cache-a0f4160e-cfb4-4d1d-bbee-6df44eb363fb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.143037] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Acquired lock "refresh_cache-a0f4160e-cfb4-4d1d-bbee-6df44eb363fb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.143037] env[63297]: DEBUG nova.network.neutron [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1579.157658] env[63297]: DEBUG nova.compute.manager [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1579.172288] env[63297]: INFO nova.compute.manager [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Shelve offloading [ 1579.175901] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1579.177635] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-afcb33a4-02e1-41bf-bd8e-230c6a9965a2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.188674] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1579.188674] env[63297]: value = "task-1697871" [ 1579.188674] env[63297]: _type = "Task" [ 1579.188674] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.203647] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1579.203833] env[63297]: DEBUG nova.compute.manager [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1579.204667] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d21c9c-ad2e-4607-bb56-70b632bc82f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.211733] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "refresh_cache-020b06c5-44e2-4f74-a1dc-d7557db3537e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.211914] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "refresh_cache-020b06c5-44e2-4f74-a1dc-d7557db3537e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.212107] env[63297]: DEBUG nova.network.neutron [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1579.432454] env[63297]: INFO nova.compute.manager [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Took 24.15 seconds to build instance. [ 1579.486875] env[63297]: DEBUG oslo_vmware.api [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697870, 'name': PowerOnVM_Task, 'duration_secs': 0.491595} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.489320] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1579.489589] env[63297]: INFO nova.compute.manager [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Took 8.83 seconds to spawn the instance on the hypervisor. 
[ 1579.489873] env[63297]: DEBUG nova.compute.manager [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1579.493176] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc173c61-998c-402c-a18d-97a865be85fd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.589539] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b906f9-c1d4-418b-8b14-2b3623100eea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.601634] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69c39f4-fdf5-4a8c-9f5e-c904dbd8ec30 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.636729] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f8122f-1695-4456-9fcb-0eddaf2132a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.645601] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec024c9-8898-4eee-a4b4-e88348734ebd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.666825] env[63297]: DEBUG nova.compute.provider_tree [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1579.684280] env[63297]: DEBUG nova.network.neutron [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1579.710672] env[63297]: DEBUG oslo_vmware.rw_handles [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Completed reading data from the image iterator. {{(pid=63297) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1579.710965] env[63297]: DEBUG oslo_vmware.rw_handles [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e0240c-db10-cc7b-f46e-2fc26ff406b9/disk-0.vmdk. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1579.712118] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e7ebbf-cbe2-4b42-b740-ce554a72b2fb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.722489] env[63297]: DEBUG oslo_vmware.rw_handles [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e0240c-db10-cc7b-f46e-2fc26ff406b9/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1579.722668] env[63297]: DEBUG oslo_vmware.rw_handles [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e0240c-db10-cc7b-f46e-2fc26ff406b9/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1579.722926] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-11b2bd15-a88c-46b0-b417-af55ce7b1f97 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.845544] env[63297]: DEBUG nova.network.neutron [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Updating instance_info_cache with network_info: [{"id": "ec16ec14-bcee-41ea-a7de-02e85d2b1169", "address": "fa:16:3e:3c:23:ab", "network": {"id": "528d622f-9aa5-47b9-aa84-42bc997b0e87", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-590811671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e8922c66c5647dd918c489f4f9bac73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec16ec14-bc", "ovs_interfaceid": "ec16ec14-bcee-41ea-a7de-02e85d2b1169", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.875336] env[63297]: DEBUG oslo_vmware.rw_handles [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e0240c-db10-cc7b-f46e-2fc26ff406b9/disk-0.vmdk. 
{{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1579.875336] env[63297]: INFO nova.virt.vmwareapi.images [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Downloaded image file data dc932a9e-d404-4f38-8fb5-26f5f3f4b879 [ 1579.878927] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677dbfe3-52b7-464a-8183-b31acfb2a2f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.896048] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ad4cbaf-e083-4ae4-8e39-8216693c52f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.926418] env[63297]: INFO nova.virt.vmwareapi.images [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] The imported VM was unregistered [ 1579.929204] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Caching image {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1579.929441] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Creating directory with path [datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879 {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1579.929736] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abd3958b-3e1e-4e14-ba89-4acfd72f0fd5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.934407] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17873e9e-32fa-4d7f-ba21-7d508d67cb4f tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "f5866b1e-cd77-464e-858e-eb14dab0637c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.214s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.947478] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Created directory with path [datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879 {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1579.947691] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_65ccd623-4aba-417f-aec3-ce4cb9a9c73a/OSTACK_IMG_65ccd623-4aba-417f-aec3-ce4cb9a9c73a.vmdk to [datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879/dc932a9e-d404-4f38-8fb5-26f5f3f4b879.vmdk. 
{{(pid=63297) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1579.947961] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c8c70fc1-a7b1-4ee7-a56c-0ba06545254e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.953258] env[63297]: DEBUG nova.network.neutron [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Updating instance_info_cache with network_info: [{"id": "82e9569c-63e6-41d2-ac37-b8d9b3d5378f", "address": "fa:16:3e:c5:63:c0", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82e9569c-63", "ovs_interfaceid": "82e9569c-63e6-41d2-ac37-b8d9b3d5378f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.955508] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1579.955508] env[63297]: value = "task-1697873" [ 1579.955508] env[63297]: _type = "Task" [ 1579.955508] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.964751] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697873, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.995731] env[63297]: DEBUG nova.compute.manager [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Received event network-vif-plugged-ec16ec14-bcee-41ea-a7de-02e85d2b1169 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1579.995979] env[63297]: DEBUG oslo_concurrency.lockutils [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] Acquiring lock "a0f4160e-cfb4-4d1d-bbee-6df44eb363fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.996258] env[63297]: DEBUG oslo_concurrency.lockutils [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] Lock "a0f4160e-cfb4-4d1d-bbee-6df44eb363fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.996446] env[63297]: DEBUG oslo_concurrency.lockutils [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] Lock "a0f4160e-cfb4-4d1d-bbee-6df44eb363fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.996623] env[63297]: DEBUG nova.compute.manager [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] No waiting events found dispatching network-vif-plugged-ec16ec14-bcee-41ea-a7de-02e85d2b1169 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1579.996791] env[63297]: WARNING nova.compute.manager [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Received unexpected event network-vif-plugged-ec16ec14-bcee-41ea-a7de-02e85d2b1169 for instance with vm_state building and task_state spawning. [ 1579.996988] env[63297]: DEBUG nova.compute.manager [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Received event network-changed-ec16ec14-bcee-41ea-a7de-02e85d2b1169 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1579.997164] env[63297]: DEBUG nova.compute.manager [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Refreshing instance network info cache due to event network-changed-ec16ec14-bcee-41ea-a7de-02e85d2b1169. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1579.997326] env[63297]: DEBUG oslo_concurrency.lockutils [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] Acquiring lock "refresh_cache-a0f4160e-cfb4-4d1d-bbee-6df44eb363fb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.013862] env[63297]: INFO nova.compute.manager [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Took 24.08 seconds to build instance. [ 1580.170544] env[63297]: DEBUG nova.compute.manager [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1580.191254] env[63297]: ERROR nova.scheduler.client.report [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [req-50eb965c-5349-4c75-a629-6ad664a5150c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-50eb965c-5349-4c75-a629-6ad664a5150c"}]} [ 1580.210484] env[63297]: DEBUG nova.scheduler.client.report [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1580.214744] env[63297]: DEBUG nova.virt.hardware [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1580.215074] env[63297]: DEBUG nova.virt.hardware [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1580.215302] env[63297]: DEBUG nova.virt.hardware [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1580.215547] env[63297]: DEBUG nova.virt.hardware [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1580.215780] env[63297]: DEBUG nova.virt.hardware [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1580.216026] env[63297]: DEBUG nova.virt.hardware [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1580.216302] env[63297]: DEBUG nova.virt.hardware [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 
tempest-ServerRescueNegativeTestJSON-169332532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1580.217103] env[63297]: DEBUG nova.virt.hardware [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1580.217367] env[63297]: DEBUG nova.virt.hardware [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1580.217625] env[63297]: DEBUG nova.virt.hardware [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1580.217871] env[63297]: DEBUG nova.virt.hardware [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1580.218829] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956ab7ef-10e7-4299-8633-79f9fc6e5119 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.223730] env[63297]: DEBUG nova.scheduler.client.report [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1580.223981] env[63297]: DEBUG nova.compute.provider_tree [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1580.229182] env[63297]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e089d464-1ab5-4c54-a560-1eb60364d88d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.246317] env[63297]: DEBUG nova.scheduler.client.report [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1580.265013] env[63297]: DEBUG nova.scheduler.client.report [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1580.351163] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Releasing lock "refresh_cache-a0f4160e-cfb4-4d1d-bbee-6df44eb363fb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.351507] env[63297]: DEBUG nova.compute.manager [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Instance network_info: |[{"id": "ec16ec14-bcee-41ea-a7de-02e85d2b1169", "address": "fa:16:3e:3c:23:ab", "network": {"id": "528d622f-9aa5-47b9-aa84-42bc997b0e87", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-590811671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e8922c66c5647dd918c489f4f9bac73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec16ec14-bc", "ovs_interfaceid": "ec16ec14-bcee-41ea-a7de-02e85d2b1169", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1580.352736] env[63297]: DEBUG oslo_concurrency.lockutils [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] Acquired lock "refresh_cache-a0f4160e-cfb4-4d1d-bbee-6df44eb363fb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.352736] env[63297]: DEBUG nova.network.neutron [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 
req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Refreshing network info cache for port ec16ec14-bcee-41ea-a7de-02e85d2b1169 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1580.356133] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:23:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c5652322-9f10-4996-baed-4c0aa13a1b4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec16ec14-bcee-41ea-a7de-02e85d2b1169', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1580.363197] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Creating folder: Project (5e8922c66c5647dd918c489f4f9bac73). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1580.372361] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43be9fcc-6e5b-40d4-91b4-84b59caceef5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.392097] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Created folder: Project (5e8922c66c5647dd918c489f4f9bac73) in parent group-v353718. [ 1580.392405] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Creating folder: Instances. Parent ref: group-v353959. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1580.392718] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5cbddb45-f43e-41d6-80bd-6022529786a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.408019] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Created folder: Instances in parent group-v353959. [ 1580.408019] env[63297]: DEBUG oslo.service.loopingcall [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1580.408019] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1580.408019] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5b3cbd0-171f-4e90-b42e-b76ba868102a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.434276] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1580.434276] env[63297]: value = "task-1697876" [ 1580.434276] env[63297]: _type = "Task" [ 1580.434276] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.447126] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697876, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.457854] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "refresh_cache-020b06c5-44e2-4f74-a1dc-d7557db3537e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.476595] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697873, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.516540] env[63297]: DEBUG oslo_concurrency.lockutils [None req-81dcb6b0-d69c-4057-96d5-5fad16257a7e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a10df0e9-4278-48f1-b111-864ac793f630" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.588s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.656151] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2c7abf-124a-4084-8b80-0f15ce9a734d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.666854] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e327ab48-e576-46c2-8c0d-ce7a42d805c8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.707616] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9c0ad9-eb07-4faa-9170-5729cf8129a5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.719664] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0020a2f6-75f5-4283-acd1-a1bee0a8b53e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.739231] env[63297]: DEBUG nova.compute.provider_tree [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f 
tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1580.868369] env[63297]: DEBUG nova.network.neutron [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Successfully updated port: 8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1580.953946] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697876, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.969380] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1580.970231] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0682b9f4-57f7-4301-a146-8b12b7dcf8c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.977187] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697873, 'name': MoveVirtualDisk_Task} progress is 26%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.983474] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1580.984248] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4900738-09a0-4384-8092-73af6430d995 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.029401] env[63297]: DEBUG nova.compute.manager [req-6e71efe9-1d1c-4ac2-a250-fab9a5dfcda3 req-4bd9ae0a-6dc1-4be2-9906-8fe8f45e7381 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Received event network-vif-unplugged-82e9569c-63e6-41d2-ac37-b8d9b3d5378f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1581.029634] env[63297]: DEBUG oslo_concurrency.lockutils [req-6e71efe9-1d1c-4ac2-a250-fab9a5dfcda3 req-4bd9ae0a-6dc1-4be2-9906-8fe8f45e7381 service nova] Acquiring lock "020b06c5-44e2-4f74-a1dc-d7557db3537e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.029855] env[63297]: DEBUG oslo_concurrency.lockutils [req-6e71efe9-1d1c-4ac2-a250-fab9a5dfcda3 req-4bd9ae0a-6dc1-4be2-9906-8fe8f45e7381 service nova] Lock "020b06c5-44e2-4f74-a1dc-d7557db3537e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.031782] env[63297]: DEBUG oslo_concurrency.lockutils [req-6e71efe9-1d1c-4ac2-a250-fab9a5dfcda3 req-4bd9ae0a-6dc1-4be2-9906-8fe8f45e7381 service nova] Lock "020b06c5-44e2-4f74-a1dc-d7557db3537e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1581.032157] env[63297]: DEBUG nova.compute.manager [req-6e71efe9-1d1c-4ac2-a250-fab9a5dfcda3 req-4bd9ae0a-6dc1-4be2-9906-8fe8f45e7381 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] No waiting events found dispatching network-vif-unplugged-82e9569c-63e6-41d2-ac37-b8d9b3d5378f {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1581.032429] env[63297]: WARNING nova.compute.manager [req-6e71efe9-1d1c-4ac2-a250-fab9a5dfcda3 req-4bd9ae0a-6dc1-4be2-9906-8fe8f45e7381 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Received unexpected event network-vif-unplugged-82e9569c-63e6-41d2-ac37-b8d9b3d5378f for instance with vm_state shelved and task_state shelving_offloading. 
[ 1581.087066] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1581.087350] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1581.087492] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleting the datastore file [datastore1] 020b06c5-44e2-4f74-a1dc-d7557db3537e {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1581.087818] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a682901-0ca9-4871-a36c-9f19f4075b83 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.097432] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1581.097432] env[63297]: value = "task-1697878" [ 1581.097432] env[63297]: _type = "Task" [ 1581.097432] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.115275] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697878, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.192343] env[63297]: DEBUG nova.network.neutron [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Updated VIF entry in instance network info cache for port ec16ec14-bcee-41ea-a7de-02e85d2b1169. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1581.193319] env[63297]: DEBUG nova.network.neutron [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Updating instance_info_cache with network_info: [{"id": "ec16ec14-bcee-41ea-a7de-02e85d2b1169", "address": "fa:16:3e:3c:23:ab", "network": {"id": "528d622f-9aa5-47b9-aa84-42bc997b0e87", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-590811671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e8922c66c5647dd918c489f4f9bac73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec16ec14-bc", "ovs_interfaceid": "ec16ec14-bcee-41ea-a7de-02e85d2b1169", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1581.225668] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "f9ad9854-2f5b-4edd-9636-8d36d0a89e89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.226067] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "f9ad9854-2f5b-4edd-9636-8d36d0a89e89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.263538] env[63297]: ERROR nova.scheduler.client.report [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [req-c9b09152-f2fa-4315-9ef4-3e3e2d5b3d78] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c9b09152-f2fa-4315-9ef4-3e3e2d5b3d78"}]} [ 1581.287683] env[63297]: DEBUG nova.scheduler.client.report [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1581.307419] env[63297]: DEBUG nova.scheduler.client.report [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1581.307607] env[63297]: DEBUG nova.compute.provider_tree [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1581.321678] env[63297]: DEBUG nova.scheduler.client.report [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1581.344288] env[63297]: DEBUG nova.scheduler.client.report [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1581.372852] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.372852] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.372852] env[63297]: DEBUG nova.network.neutron [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1581.448563] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697876, 'name': CreateVM_Task, 'duration_secs': 0.963651} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.448930] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1581.449737] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.449737] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.450145] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1581.451694] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6d9fc2b-4dcb-433c-943b-8372dc17f5b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.457018] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Waiting for the task: (returnval){ [ 1581.457018] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52356e5c-d1da-7fe6-62ab-0404159c67cd" [ 1581.457018] env[63297]: _type = "Task" [ 1581.457018] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.477183] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52356e5c-d1da-7fe6-62ab-0404159c67cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.485685] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697873, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.613564] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697878, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.695926] env[63297]: DEBUG oslo_concurrency.lockutils [req-11a8aee3-bc01-436f-aa64-dd2a0eddff44 req-ba2ddf68-29c9-476d-bf36-5b1afd90e748 service nova] Releasing lock "refresh_cache-a0f4160e-cfb4-4d1d-bbee-6df44eb363fb" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.715183] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccabc56-5d3b-4616-a457-34e37d0ea801 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.728181] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3ccdbd-cf2f-47e6-a3c9-b8021acc9c19 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.733128] env[63297]: DEBUG nova.compute.manager [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Starting instance... 
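The stream of "Task: {...} progress is N%" entries above is produced by a poll loop: a vCenter task is submitted, then its state is re-read until it reaches a terminal value. A minimal sketch of that pattern, standard library only (the function name and the task-info shape are assumptions for illustration, not the oslo.vmware implementation):

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        # get_task_info() is assumed to return a dict such as
        # {"state": "queued"|"running"|"success"|"error", "progress": 0..100, "error": "..."}
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError("task failed: %s" % info.get("error"))
            # queued/running: report progress and poll again after a short sleep
            print("progress is %s%%" % info.get("progress", 0))
            time.sleep(interval)
        raise TimeoutError("task did not reach a terminal state within %ss" % timeout)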
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1581.771188] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3de5c8-66ed-4dbd-beb7-f019f496db09 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.783704] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4258f8-1714-47fb-b3fc-43a13efd3fb6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.803699] env[63297]: DEBUG nova.compute.provider_tree [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1581.920681] env[63297]: DEBUG nova.network.neutron [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1581.980374] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52356e5c-d1da-7fe6-62ab-0404159c67cd, 'name': SearchDatastore_Task, 'duration_secs': 0.093472} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.980374] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.980581] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1581.980673] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.980812] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.981045] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1581.981808] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-351a9c9b-5bec-41fa-a982-f850c1971aa1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.990502] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697873, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.000776] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1582.001130] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1582.002704] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c31b791-690f-4cfb-a4b4-e77d18a2a6cb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.009639] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Waiting for the task: (returnval){ [ 1582.009639] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52596377-95ab-e249-4bd4-682489a0c166" [ 1582.009639] env[63297]: _type = "Task" [ 1582.009639] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.021500] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52596377-95ab-e249-4bd4-682489a0c166, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.028064] env[63297]: DEBUG nova.compute.manager [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Received event network-vif-plugged-8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1582.028355] env[63297]: DEBUG oslo_concurrency.lockutils [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] Acquiring lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.028559] env[63297]: DEBUG oslo_concurrency.lockutils [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.028819] env[63297]: DEBUG oslo_concurrency.lockutils [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.029054] env[63297]: DEBUG nova.compute.manager [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] No waiting events found dispatching network-vif-plugged-8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1582.029239] env[63297]: WARNING nova.compute.manager [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Received unexpected event 
network-vif-plugged-8f272d86-3373-42d6-8f0d-94e83e8e6b2c for instance with vm_state building and task_state spawning. [ 1582.029404] env[63297]: DEBUG nova.compute.manager [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Received event network-changed-8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1582.029558] env[63297]: DEBUG nova.compute.manager [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Refreshing instance network info cache due to event network-changed-8f272d86-3373-42d6-8f0d-94e83e8e6b2c. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1582.029775] env[63297]: DEBUG oslo_concurrency.lockutils [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] Acquiring lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.044639] env[63297]: INFO nova.compute.manager [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Rebuilding instance [ 1582.096600] env[63297]: DEBUG nova.compute.manager [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1582.097872] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be19f5e7-e837-417e-b8ec-1941a0a0eb48 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.103227] env[63297]: DEBUG nova.network.neutron [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updating instance_info_cache with network_info: [{"id": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "address": "fa:16:3e:6a:dc:37", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f272d86-33", "ovs_interfaceid": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.120869] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697878, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.252925] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.341505] env[63297]: DEBUG nova.scheduler.client.report [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 117 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1582.341748] env[63297]: DEBUG nova.compute.provider_tree [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 117 to 118 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1582.341935] env[63297]: DEBUG nova.compute.provider_tree [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1582.483353] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697873, 'name': MoveVirtualDisk_Task} progress is 88%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.522706] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52596377-95ab-e249-4bd4-682489a0c166, 'name': SearchDatastore_Task, 'duration_secs': 0.089044} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.523588] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4df876a-9aac-43d0-9bf4-0735376bf3e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.531718] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Waiting for the task: (returnval){ [ 1582.531718] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522a10c3-5dcf-593f-0e0a-1271b483eb85" [ 1582.531718] env[63297]: _type = "Task" [ 1582.531718] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.542349] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522a10c3-5dcf-593f-0e0a-1271b483eb85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.610464] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.610756] env[63297]: DEBUG nova.compute.manager [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Instance network_info: |[{"id": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "address": "fa:16:3e:6a:dc:37", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f272d86-33", "ovs_interfaceid": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1582.610876] env[63297]: DEBUG oslo_concurrency.lockutils [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] Acquired lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.611144] env[63297]: DEBUG nova.network.neutron [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Refreshing network info cache for port 8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1582.612414] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:dc:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f272d86-3373-42d6-8f0d-94e83e8e6b2c', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1582.620389] env[63297]: DEBUG oslo.service.loopingcall [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1582.626487] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1582.626487] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1582.626839] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697878, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.627278] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4952c10-591a-4c31-9489-6eaacd0d8a0e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.628973] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-185397e0-9efe-46b3-b18d-96c844cdcc11 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.651837] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1582.651837] env[63297]: value = "task-1697880" [ 1582.651837] env[63297]: _type = "Task" [ 1582.651837] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.656465] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1582.656465] env[63297]: value = "task-1697879" [ 1582.656465] env[63297]: _type = "Task" [ 1582.656465] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.663849] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697880, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.671478] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697879, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.847617] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.699s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.848216] env[63297]: DEBUG nova.compute.manager [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1582.851195] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.003s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.851383] env[63297]: DEBUG nova.objects.instance [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lazy-loading 'resources' on Instance uuid fba9040d-f904-44a1-8785-14d4696ea939 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1582.982736] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697873, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.831277} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.983053] env[63297]: INFO nova.virt.vmwareapi.ds_util [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_65ccd623-4aba-417f-aec3-ce4cb9a9c73a/OSTACK_IMG_65ccd623-4aba-417f-aec3-ce4cb9a9c73a.vmdk to [datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879/dc932a9e-d404-4f38-8fb5-26f5f3f4b879.vmdk. [ 1582.983247] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Cleaning up location [datastore1] OSTACK_IMG_65ccd623-4aba-417f-aec3-ce4cb9a9c73a {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1582.983410] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_65ccd623-4aba-417f-aec3-ce4cb9a9c73a {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1582.983666] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abe5284f-d3d9-4647-bdd1-73493d59f78e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.990710] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1582.990710] env[63297]: value = "task-1697881" [ 1582.990710] env[63297]: _type = "Task" [ 1582.990710] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.999758] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697881, 'name': DeleteDatastoreFile_Task} progress is 0%. 
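The Acquiring/acquired/released lines (here the "compute_resources" lock held 4.699s after a 14.003s wait) come from a wrapper that times how long a caller waits for, and then holds, a named lock. A minimal stand-in with the same logging shape, built on threading (names are assumptions, not oslo.concurrency itself):

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def named_lock(name, caller):
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        print('Acquiring lock "%s" by "%s"' % (name, caller))
        start = time.monotonic()
        lock.acquire()
        print('Lock "%s" acquired by "%s" :: waited %.3fs'
              % (name, caller, time.monotonic() - start))
        held_start = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print('Lock "%s" "released" by "%s" :: held %.3fs'
                  % (name, caller, time.monotonic() - held_start))

    # usage:
    # with named_lock("compute_resources", "ResourceTracker.instance_claim"):
    #     ...claim resources...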
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.042781] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522a10c3-5dcf-593f-0e0a-1271b483eb85, 'name': SearchDatastore_Task, 'duration_secs': 0.08682} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.043078] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.043333] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] a0f4160e-cfb4-4d1d-bbee-6df44eb363fb/a0f4160e-cfb4-4d1d-bbee-6df44eb363fb.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1583.043598] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc887a81-f48c-4cd8-9955-bcaca436380f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.055614] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Waiting for the task: (returnval){ [ 1583.055614] env[63297]: value = "task-1697882" [ 1583.055614] env[63297]: _type = "Task" [ 1583.055614] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.067942] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697882, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.080581] env[63297]: DEBUG nova.compute.manager [req-45cd72b4-2c9b-4bb5-a7be-345f1197adff req-4c611b58-e850-40f7-8f50-f040fa2b9031 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Received event network-changed-82e9569c-63e6-41d2-ac37-b8d9b3d5378f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1583.080807] env[63297]: DEBUG nova.compute.manager [req-45cd72b4-2c9b-4bb5-a7be-345f1197adff req-4c611b58-e850-40f7-8f50-f040fa2b9031 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Refreshing instance network info cache due to event network-changed-82e9569c-63e6-41d2-ac37-b8d9b3d5378f. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1583.081520] env[63297]: DEBUG oslo_concurrency.lockutils [req-45cd72b4-2c9b-4bb5-a7be-345f1197adff req-4c611b58-e850-40f7-8f50-f040fa2b9031 service nova] Acquiring lock "refresh_cache-020b06c5-44e2-4f74-a1dc-d7557db3537e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.081520] env[63297]: DEBUG oslo_concurrency.lockutils [req-45cd72b4-2c9b-4bb5-a7be-345f1197adff req-4c611b58-e850-40f7-8f50-f040fa2b9031 service nova] Acquired lock "refresh_cache-020b06c5-44e2-4f74-a1dc-d7557db3537e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.081520] env[63297]: DEBUG nova.network.neutron [req-45cd72b4-2c9b-4bb5-a7be-345f1197adff req-4c611b58-e850-40f7-8f50-f040fa2b9031 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Refreshing network info cache for port 82e9569c-63e6-41d2-ac37-b8d9b3d5378f {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1583.116318] env[63297]: DEBUG oslo_vmware.api [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697878, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.631393} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.116318] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1583.116318] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1583.116318] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1583.140012] env[63297]: INFO nova.scheduler.client.report [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted allocations for instance 020b06c5-44e2-4f74-a1dc-d7557db3537e [ 1583.164501] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697880, 'name': CreateVM_Task, 'duration_secs': 0.442057} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.164787] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1583.165667] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.165667] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.165953] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1583.166237] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34e30803-b0c0-4eea-80ea-23b978dc75f0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.170540] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697879, 'name': PowerOffVM_Task, 'duration_secs': 0.221893} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.171119] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1583.171360] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1583.172144] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba370a9c-9f85-4a3b-bb15-c582cf1f6f8c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.176173] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1583.176173] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e31589-ecf3-256e-2101-f77838551fe2" [ 1583.176173] env[63297]: _type = "Task" [ 1583.176173] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.181757] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1583.182287] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9afa1a0-74d7-4511-973f-34d5ccd95728 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.187785] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e31589-ecf3-256e-2101-f77838551fe2, 'name': SearchDatastore_Task, 'duration_secs': 0.009932} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.188112] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.188339] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1583.188563] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.188732] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.188919] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1583.189403] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24a3d477-0655-420d-ad5a-e178b2a4c5bc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.198164] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1583.198384] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1583.199220] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63ab24f8-83df-4799-9458-294ce0e3e037 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.206097] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1583.206097] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526d195f-0e91-4d1c-11c3-852c81e75f82" [ 1583.206097] env[63297]: _type = "Task" [ 1583.206097] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.216201] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526d195f-0e91-4d1c-11c3-852c81e75f82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.347336] env[63297]: DEBUG nova.network.neutron [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updated VIF entry in instance network info cache for port 8f272d86-3373-42d6-8f0d-94e83e8e6b2c. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1583.348032] env[63297]: DEBUG nova.network.neutron [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updating instance_info_cache with network_info: [{"id": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "address": "fa:16:3e:6a:dc:37", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f272d86-33", "ovs_interfaceid": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1583.356286] env[63297]: DEBUG nova.compute.utils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Using /dev/sd instead of None {{(pid=63297) 
get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1583.365226] env[63297]: DEBUG nova.compute.manager [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1583.365226] env[63297]: DEBUG nova.network.neutron [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1583.393637] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1583.394907] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1583.395145] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleting the datastore file [datastore1] a10df0e9-4278-48f1-b111-864ac793f630 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1583.396752] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6150f48c-ec88-44aa-9993-c6f4f46851a2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.409429] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1583.409429] env[63297]: value = "task-1697884" [ 1583.409429] env[63297]: _type = "Task" [ 1583.409429] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.421527] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697884, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.429435] env[63297]: DEBUG nova.policy [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2989f36da21c421d8e244dd2663d94be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e843a2460c21477ca894b3e2846ec98d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1583.501872] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697881, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036376} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.502197] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1583.502235] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879/dc932a9e-d404-4f38-8fb5-26f5f3f4b879.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.502482] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879/dc932a9e-d404-4f38-8fb5-26f5f3f4b879.vmdk to [datastore1] 310cf8d4-613a-4c35-b118-7d79138e4799/310cf8d4-613a-4c35-b118-7d79138e4799.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1583.502750] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd2a14c3-839a-4635-909d-33a5e0af3040 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.511728] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1583.511728] env[63297]: value = "task-1697885" [ 1583.511728] env[63297]: _type = "Task" [ 1583.511728] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.520505] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697885, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.566312] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697882, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475172} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.569067] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] a0f4160e-cfb4-4d1d-bbee-6df44eb363fb/a0f4160e-cfb4-4d1d-bbee-6df44eb363fb.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1583.569328] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1583.569830] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-597ad3ca-d5e0-4964-a251-55c28489075a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.577261] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Waiting for the task: (returnval){ [ 1583.577261] env[63297]: value = "task-1697886" [ 1583.577261] env[63297]: _type = "Task" [ 1583.577261] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.590143] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697886, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.644915] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1583.706559] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2304d8-5dbe-4fc5-a41a-26b60eee9d0f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.717666] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e7ae88-6121-46cd-a6f7-2ff309889612 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.724723] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526d195f-0e91-4d1c-11c3-852c81e75f82, 'name': SearchDatastore_Task, 'duration_secs': 0.07162} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.728210] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c3aa37a-474d-49ee-ac16-72c0e4a8b102 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.761622] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04205cc-dc92-4520-b6f8-2a38a3d84c93 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.764855] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1583.764855] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f8d375-2471-512a-2609-5f8bf915c61b" [ 1583.764855] env[63297]: _type = "Task" [ 1583.764855] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.772091] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88dc98b-3ec7-4ec4-b61b-023d071e4080 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.780143] env[63297]: DEBUG nova.network.neutron [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Successfully created port: 8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1583.782097] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f8d375-2471-512a-2609-5f8bf915c61b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.793569] env[63297]: DEBUG nova.compute.provider_tree [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1583.850232] env[63297]: DEBUG oslo_concurrency.lockutils [req-d805df8b-2dd2-4515-acff-1b56c59e3d84 req-f0eb0463-9dd1-4b9d-a9df-bf2581e98ac8 service nova] Releasing lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.865389] env[63297]: DEBUG nova.compute.manager [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1583.921101] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263562} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.921385] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1583.921577] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1583.921753] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1583.943728] env[63297]: DEBUG nova.network.neutron [req-45cd72b4-2c9b-4bb5-a7be-345f1197adff req-4c611b58-e850-40f7-8f50-f040fa2b9031 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Updated VIF entry in instance network info cache for port 82e9569c-63e6-41d2-ac37-b8d9b3d5378f. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1583.944139] env[63297]: DEBUG nova.network.neutron [req-45cd72b4-2c9b-4bb5-a7be-345f1197adff req-4c611b58-e850-40f7-8f50-f040fa2b9031 service nova] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Updating instance_info_cache with network_info: [{"id": "82e9569c-63e6-41d2-ac37-b8d9b3d5378f", "address": "fa:16:3e:c5:63:c0", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": null, "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap82e9569c-63", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.022713] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697885, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.090066] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697886, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078941} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.090430] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1584.091483] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64992f9b-1236-404e-9307-84f51efd4a9d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.118345] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] a0f4160e-cfb4-4d1d-bbee-6df44eb363fb/a0f4160e-cfb4-4d1d-bbee-6df44eb363fb.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1584.119121] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-431be469-744f-461b-8506-52a1fa328190 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.141595] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Waiting for the task: (returnval){ [ 1584.141595] env[63297]: value = "task-1697887" [ 1584.141595] env[63297]: _type = "Task" [ 1584.141595] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.151723] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697887, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.276964] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f8d375-2471-512a-2609-5f8bf915c61b, 'name': SearchDatastore_Task, 'duration_secs': 0.067758} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.277194] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.277434] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 1d8c6df5-069f-4647-a2f6-e69a4bf8be94/1d8c6df5-069f-4647-a2f6-e69a4bf8be94.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1584.277708] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d75d3c0-6918-4c43-bc48-098d23cb6c63 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.287040] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1584.287040] env[63297]: value = "task-1697888" [ 1584.287040] env[63297]: _type = "Task" [ 1584.287040] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.299230] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697888, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.330529] env[63297]: DEBUG nova.scheduler.client.report [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 118 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1584.330805] env[63297]: DEBUG nova.compute.provider_tree [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 118 to 119 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1584.330988] env[63297]: DEBUG nova.compute.provider_tree [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1584.446633] env[63297]: DEBUG oslo_concurrency.lockutils [req-45cd72b4-2c9b-4bb5-a7be-345f1197adff req-4c611b58-e850-40f7-8f50-f040fa2b9031 service nova] Releasing lock "refresh_cache-020b06c5-44e2-4f74-a1dc-d7557db3537e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.522348] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697885, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.653455] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697887, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.798465] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697888, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.837728] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.986s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.840283] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "020b06c5-44e2-4f74-a1dc-d7557db3537e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.840887] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.822s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.842823] env[63297]: INFO nova.compute.claims [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1584.863517] env[63297]: INFO nova.scheduler.client.report [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Deleted allocations for instance fba9040d-f904-44a1-8785-14d4696ea939 [ 1584.876306] env[63297]: DEBUG nova.compute.manager [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1584.904389] env[63297]: DEBUG nova.virt.hardware [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1584.904691] env[63297]: DEBUG nova.virt.hardware [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1584.904742] env[63297]: DEBUG nova.virt.hardware [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1584.904983] env[63297]: DEBUG nova.virt.hardware [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1584.905183] env[63297]: DEBUG nova.virt.hardware [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1584.905918] env[63297]: DEBUG nova.virt.hardware [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1584.905918] env[63297]: DEBUG nova.virt.hardware [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1584.905918] env[63297]: DEBUG nova.virt.hardware [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1584.905918] env[63297]: DEBUG nova.virt.hardware [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1584.906264] env[63297]: DEBUG nova.virt.hardware [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1584.906414] env[63297]: DEBUG nova.virt.hardware [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1584.907727] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd8c82a-ba46-4397-a653-5c937d369560 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.918664] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6049c6c5-b1df-490b-8c32-bc8cb6f7bcd8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.964923] env[63297]: DEBUG nova.virt.hardware [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1584.965244] env[63297]: DEBUG nova.virt.hardware [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1584.965407] env[63297]: DEBUG nova.virt.hardware [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1584.965610] env[63297]: DEBUG nova.virt.hardware [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 
tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1584.965835] env[63297]: DEBUG nova.virt.hardware [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1584.966107] env[63297]: DEBUG nova.virt.hardware [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1584.966453] env[63297]: DEBUG nova.virt.hardware [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1584.966723] env[63297]: DEBUG nova.virt.hardware [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1584.967085] env[63297]: DEBUG nova.virt.hardware [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1584.967258] env[63297]: DEBUG nova.virt.hardware [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1584.967957] env[63297]: DEBUG nova.virt.hardware [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1584.969284] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fcad10d-f0a2-4d51-a9c7-f475a48e61b9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.978937] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b48e766-ff31-4662-8ffc-f762048e95ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.996544] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:c8:c0', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1fc6d6cc-328f-4608-becc-2ab8cce8dc98', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1585.005273] env[63297]: DEBUG oslo.service.loopingcall [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1585.005980] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1585.006254] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-516ff88e-44ca-44e0-813f-90836e8d7b0b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.030783] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1585.030783] env[63297]: value = "task-1697889" [ 1585.030783] env[63297]: _type = "Task" [ 1585.030783] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.034439] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697885, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.045374] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697889, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.157138] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697887, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.304369] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697888, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.325492] env[63297]: DEBUG nova.compute.manager [req-db81e9ec-47a7-47d8-8fa7-ca0d7ad6ce63 req-a8c9c7ec-6b40-466b-9f0e-42398a847d6e service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Received event network-vif-plugged-8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1585.325812] env[63297]: DEBUG oslo_concurrency.lockutils [req-db81e9ec-47a7-47d8-8fa7-ca0d7ad6ce63 req-a8c9c7ec-6b40-466b-9f0e-42398a847d6e service nova] Acquiring lock "6c1aa85a-ee37-461b-ad8a-7fbb525e836e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.326171] env[63297]: DEBUG oslo_concurrency.lockutils [req-db81e9ec-47a7-47d8-8fa7-ca0d7ad6ce63 req-a8c9c7ec-6b40-466b-9f0e-42398a847d6e service nova] Lock "6c1aa85a-ee37-461b-ad8a-7fbb525e836e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.326378] env[63297]: DEBUG oslo_concurrency.lockutils [req-db81e9ec-47a7-47d8-8fa7-ca0d7ad6ce63 req-a8c9c7ec-6b40-466b-9f0e-42398a847d6e service nova] Lock "6c1aa85a-ee37-461b-ad8a-7fbb525e836e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.326595] env[63297]: DEBUG nova.compute.manager [req-db81e9ec-47a7-47d8-8fa7-ca0d7ad6ce63 req-a8c9c7ec-6b40-466b-9f0e-42398a847d6e service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] No waiting events found dispatching network-vif-plugged-8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1585.326791] env[63297]: WARNING nova.compute.manager [req-db81e9ec-47a7-47d8-8fa7-ca0d7ad6ce63 req-a8c9c7ec-6b40-466b-9f0e-42398a847d6e service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Received unexpected event network-vif-plugged-8c7f6ca3-5c56-4270-88a0-28ddd06a0256 for instance with vm_state building and task_state spawning. [ 1585.374680] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1ef7213b-c7e1-49fc-ab62-530776eab307 tempest-ServersNegativeTestJSON-553128732 tempest-ServersNegativeTestJSON-553128732-project-member] Lock "fba9040d-f904-44a1-8785-14d4696ea939" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.225s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.498100] env[63297]: DEBUG nova.network.neutron [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Successfully updated port: 8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1585.540231] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697885, 'name': CopyVirtualDisk_Task} progress is 85%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.553495] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697889, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.657757] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697887, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.802437] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697888, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.009687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquiring lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.009687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquired lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.009687] env[63297]: DEBUG nova.network.neutron [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1586.039354] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697885, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.363545} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.039642] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/dc932a9e-d404-4f38-8fb5-26f5f3f4b879/dc932a9e-d404-4f38-8fb5-26f5f3f4b879.vmdk to [datastore1] 310cf8d4-613a-4c35-b118-7d79138e4799/310cf8d4-613a-4c35-b118-7d79138e4799.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1586.040647] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74988da-a9f2-4ee0-9c42-be76610cb901 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.078214] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 310cf8d4-613a-4c35-b118-7d79138e4799/310cf8d4-613a-4c35-b118-7d79138e4799.vmdk or device None with type streamOptimized {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1586.078514] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697889, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.085022] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-652690e9-6f47-4bb6-b81e-62e4cfcca0f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.107068] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1586.107068] env[63297]: value = "task-1697890" [ 1586.107068] env[63297]: _type = "Task" [ 1586.107068] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.118885] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697890, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.161728] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697887, 'name': ReconfigVM_Task, 'duration_secs': 2.016383} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.165514] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Reconfigured VM instance instance-00000058 to attach disk [datastore1] a0f4160e-cfb4-4d1d-bbee-6df44eb363fb/a0f4160e-cfb4-4d1d-bbee-6df44eb363fb.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1586.167326] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ac0e630-d444-436d-ba31-3739f8b56622 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.176051] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Waiting for the task: (returnval){ [ 1586.176051] env[63297]: value = "task-1697891" [ 1586.176051] env[63297]: _type = "Task" [ 1586.176051] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.186297] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697891, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.296427] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb51a90-cf61-4937-bbba-9d17d2d7e4bb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.309323] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697888, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.927564} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.310404] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d6f544-9468-4b3a-bd76-55eebc3896ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.313566] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 1d8c6df5-069f-4647-a2f6-e69a4bf8be94/1d8c6df5-069f-4647-a2f6-e69a4bf8be94.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1586.313786] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1586.314055] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da24f483-bb31-4676-90cd-65beeaeafdd3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.349623] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29f3eda-a524-4ba5-87bc-3d65e57a733d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.352529] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1586.352529] env[63297]: value = "task-1697892" [ 1586.352529] env[63297]: _type = "Task" [ 1586.352529] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.359796] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a32b287-0042-4d3a-a827-74f10cfc8134 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.368244] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697892, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.380246] env[63297]: DEBUG nova.compute.provider_tree [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1586.550728] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697889, 'name': CreateVM_Task, 'duration_secs': 1.519595} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.551045] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1586.551686] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.551855] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.552210] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1586.552474] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e851583c-686c-4f1d-b21f-441e065408f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.561096] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1586.561096] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c592b4-807b-6393-08a2-92e8a6c247c1" [ 1586.561096] env[63297]: _type = "Task" [ 1586.561096] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.574691] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c592b4-807b-6393-08a2-92e8a6c247c1, 'name': SearchDatastore_Task, 'duration_secs': 0.014112} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.575537] env[63297]: DEBUG nova.network.neutron [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1586.578195] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.578195] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1586.578195] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.578195] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.578342] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1586.578521] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08faf668-c38b-4efd-894b-ba05c3f0af4c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.589880] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1586.590090] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1586.590851] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8e717fb-3f16-4e21-a01c-d07b00d9b210 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.600089] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1586.600089] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]521d98b9-e7b5-8e88-cbc0-3af0d647d01a" [ 1586.600089] env[63297]: _type = "Task" [ 1586.600089] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.611371] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]521d98b9-e7b5-8e88-cbc0-3af0d647d01a, 'name': SearchDatastore_Task, 'duration_secs': 0.010046} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.615223] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb3a887b-e8f8-4793-9212-6e2cf07c460c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.629092] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697890, 'name': ReconfigVM_Task, 'duration_secs': 0.461193} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.629423] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1586.629423] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520a07d7-74bb-b58e-dbe9-0dbc7cb790e6" [ 1586.629423] env[63297]: _type = "Task" [ 1586.629423] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.629668] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 310cf8d4-613a-4c35-b118-7d79138e4799/310cf8d4-613a-4c35-b118-7d79138e4799.vmdk or device None with type streamOptimized {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1586.630431] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86ddcf6f-966a-4e4b-93c2-b692d1c89712 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.644577] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520a07d7-74bb-b58e-dbe9-0dbc7cb790e6, 'name': SearchDatastore_Task, 'duration_secs': 0.011976} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.645978] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.646331] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] a10df0e9-4278-48f1-b111-864ac793f630/a10df0e9-4278-48f1-b111-864ac793f630.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1586.646584] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1586.646584] env[63297]: value = "task-1697893" [ 1586.646584] env[63297]: _type = "Task" [ 1586.646584] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.646786] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-561712bd-e7a0-4771-8d16-d1e25fe9bd3e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.660289] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697893, 'name': Rename_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.661902] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1586.661902] env[63297]: value = "task-1697894" [ 1586.661902] env[63297]: _type = "Task" [ 1586.661902] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.675574] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697894, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.686266] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697891, 'name': Rename_Task, 'duration_secs': 0.201241} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.686635] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1586.686962] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c522da2-869f-4de2-b46b-e34d56660c5b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.694719] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Waiting for the task: (returnval){ [ 1586.694719] env[63297]: value = "task-1697895" [ 1586.694719] env[63297]: _type = "Task" [ 1586.694719] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.704563] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697895, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.774247] env[63297]: DEBUG nova.network.neutron [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updating instance_info_cache with network_info: [{"id": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "address": "fa:16:3e:b9:69:22", "network": {"id": "b20022f3-067f-42e6-9029-1f69f6657c27", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-894113652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e843a2460c21477ca894b3e2846ec98d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7f6ca3-5c", "ovs_interfaceid": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1586.864295] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697892, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086546} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.864585] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1586.865462] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691aa1a5-6b0a-4b27-b833-b559ed3a8722 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.891178] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 1d8c6df5-069f-4647-a2f6-e69a4bf8be94/1d8c6df5-069f-4647-a2f6-e69a4bf8be94.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1586.892183] env[63297]: DEBUG nova.scheduler.client.report [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1586.895605] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f52733d7-9b09-484e-a8e2-6939844d00d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.919848] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1586.919848] env[63297]: value = "task-1697896" [ 1586.919848] env[63297]: _type = "Task" [ 1586.919848] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.930190] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697896, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.159527] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697893, 'name': Rename_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.176702] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697894, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.208135] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697895, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.277350] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Releasing lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.277677] env[63297]: DEBUG nova.compute.manager [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Instance network_info: |[{"id": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "address": "fa:16:3e:b9:69:22", "network": {"id": "b20022f3-067f-42e6-9029-1f69f6657c27", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-894113652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e843a2460c21477ca894b3e2846ec98d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7f6ca3-5c", "ovs_interfaceid": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1587.277875] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:69:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69054a13-b7ef-44e1-bd3b-3ca5ba602848', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c7f6ca3-5c56-4270-88a0-28ddd06a0256', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1587.286411] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f 
tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Creating folder: Project (e843a2460c21477ca894b3e2846ec98d). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1587.286811] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94fa2cdb-36d0-43e3-8b58-4dc4503460bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.303327] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Created folder: Project (e843a2460c21477ca894b3e2846ec98d) in parent group-v353718. [ 1587.303327] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Creating folder: Instances. Parent ref: group-v353964. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1587.303327] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-864d016c-b6e4-4db0-b2c8-3c09ad294cb8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.316155] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Created folder: Instances in parent group-v353964. [ 1587.316410] env[63297]: DEBUG oslo.service.loopingcall [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1587.317533] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1587.317533] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90c76176-22d1-4d44-85d6-ebf9c7b5e23d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.339593] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1587.339593] env[63297]: value = "task-1697899" [ 1587.339593] env[63297]: _type = "Task" [ 1587.339593] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.359079] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697899, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.360438] env[63297]: DEBUG nova.compute.manager [req-c45f1b21-bc5e-4b8c-ba3a-da2d4b9e3e81 req-19c222a7-6762-4f8c-a1dd-b3297d20bbad service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Received event network-changed-8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1587.360620] env[63297]: DEBUG nova.compute.manager [req-c45f1b21-bc5e-4b8c-ba3a-da2d4b9e3e81 req-19c222a7-6762-4f8c-a1dd-b3297d20bbad service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Refreshing instance network info cache due to event network-changed-8c7f6ca3-5c56-4270-88a0-28ddd06a0256. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1587.361103] env[63297]: DEBUG oslo_concurrency.lockutils [req-c45f1b21-bc5e-4b8c-ba3a-da2d4b9e3e81 req-19c222a7-6762-4f8c-a1dd-b3297d20bbad service nova] Acquiring lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.361248] env[63297]: DEBUG oslo_concurrency.lockutils [req-c45f1b21-bc5e-4b8c-ba3a-da2d4b9e3e81 req-19c222a7-6762-4f8c-a1dd-b3297d20bbad service nova] Acquired lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.361463] env[63297]: DEBUG nova.network.neutron [req-c45f1b21-bc5e-4b8c-ba3a-da2d4b9e3e81 req-19c222a7-6762-4f8c-a1dd-b3297d20bbad service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Refreshing network info cache for port 8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1587.415126] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.415876] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.853s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.415876] env[63297]: DEBUG nova.objects.instance [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Lazy-loading 'resources' on Instance uuid ac112251-8cc3-4f57-8983-8a07e2a068f8 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1587.439582] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697896, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.595825] env[63297]: DEBUG nova.compute.manager [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1587.596909] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8925c55e-839d-48a6-b1f0-823eb84e037d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.661053] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697893, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.675692] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697894, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535665} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.675994] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] a10df0e9-4278-48f1-b111-864ac793f630/a10df0e9-4278-48f1-b111-864ac793f630.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1587.676187] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1587.676445] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2e9dc115-340f-438e-aba2-e3a7b23e5f0b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.683172] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1587.683172] env[63297]: value = "task-1697900" [ 1587.683172] env[63297]: _type = "Task" [ 1587.683172] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.698413] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697900, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.707738] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697895, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.849574] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697899, 'name': CreateVM_Task, 'duration_secs': 0.426951} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.849764] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1587.850480] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.850645] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.850967] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1587.851246] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f1038c9-428f-4f1e-81c8-9d4d71ce3600 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.856051] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1587.856051] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b9a9e4-474e-e911-f237-b1112bd23d79" [ 1587.856051] env[63297]: _type = "Task" [ 1587.856051] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.865524] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b9a9e4-474e-e911-f237-b1112bd23d79, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.919015] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquiring lock "ca4d8eee-de0e-4a40-89a3-fcfd154d0196" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.919015] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lock "ca4d8eee-de0e-4a40-89a3-fcfd154d0196" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.936705] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697896, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.107921] env[63297]: INFO nova.compute.manager [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] instance snapshotting [ 1588.108662] env[63297]: DEBUG nova.objects.instance [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'flavor' on Instance uuid b1ed5d76-d358-49d3-a854-8f968bc987ad {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1588.136618] env[63297]: DEBUG nova.network.neutron [req-c45f1b21-bc5e-4b8c-ba3a-da2d4b9e3e81 req-19c222a7-6762-4f8c-a1dd-b3297d20bbad service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updated VIF entry in instance network info cache for port 8c7f6ca3-5c56-4270-88a0-28ddd06a0256. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1588.136618] env[63297]: DEBUG nova.network.neutron [req-c45f1b21-bc5e-4b8c-ba3a-da2d4b9e3e81 req-19c222a7-6762-4f8c-a1dd-b3297d20bbad service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updating instance_info_cache with network_info: [{"id": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "address": "fa:16:3e:b9:69:22", "network": {"id": "b20022f3-067f-42e6-9029-1f69f6657c27", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-894113652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e843a2460c21477ca894b3e2846ec98d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7f6ca3-5c", "ovs_interfaceid": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1588.163086] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697893, 'name': Rename_Task, 'duration_secs': 1.200572} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.163443] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1588.163697] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a6a1fd1-05f4-4111-8a48-01d49845a463 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.169902] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1588.169902] env[63297]: value = "task-1697901" [ 1588.169902] env[63297]: _type = "Task" [ 1588.169902] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.178441] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697901, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.191971] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697900, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133075} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.192347] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1588.193083] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec19522b-1c7a-440c-b95c-734e6cf92923 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.220028] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] a10df0e9-4278-48f1-b111-864ac793f630/a10df0e9-4278-48f1-b111-864ac793f630.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1588.223171] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ef38ffa-617d-4ad2-98ac-58b0890f4384 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.241916] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697895, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.247166] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1588.247166] env[63297]: value = "task-1697902" [ 1588.247166] env[63297]: _type = "Task" [ 1588.247166] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.257194] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697902, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.304941] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544d1423-e653-4f7b-a65c-d2d0f006cab5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.313148] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34161881-bca5-45f9-be94-589ef9603b4f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.346014] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52aa3287-694d-4936-b6f5-79a7dfc5c9a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.354103] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89984b1b-8d67-41af-a77f-fca3a8c66b5a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.371663] env[63297]: DEBUG nova.compute.provider_tree [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1588.376981] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b9a9e4-474e-e911-f237-b1112bd23d79, 'name': SearchDatastore_Task, 'duration_secs': 0.020136} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.377631] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.377860] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1588.378122] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.378363] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.378448] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1588.378738] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42f87509-155b-4bab-9cf2-933775947b68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.401903] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1588.402150] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1588.402931] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48cc05be-97fb-48a7-924f-fc78d89ccf52 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.409636] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1588.409636] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e11a62-46b5-2621-79fb-73b0a8806453" [ 1588.409636] env[63297]: _type = "Task" [ 1588.409636] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.418046] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e11a62-46b5-2621-79fb-73b0a8806453, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.423824] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lock "ca4d8eee-de0e-4a40-89a3-fcfd154d0196" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.505s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.424971] env[63297]: DEBUG nova.compute.manager [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1588.439602] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697896, 'name': ReconfigVM_Task, 'duration_secs': 1.508892} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.439602] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 1d8c6df5-069f-4647-a2f6-e69a4bf8be94/1d8c6df5-069f-4647-a2f6-e69a4bf8be94.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1588.439602] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74d31621-1f3e-4226-b188-185394d2c2af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.444777] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1588.444777] env[63297]: value = "task-1697903" [ 1588.444777] env[63297]: _type = "Task" [ 1588.444777] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.457786] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697903, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.613528] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cb49fc-092a-408c-a990-4d7d49fd091e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.634662] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc614ce-9e16-45e3-b4c0-c9d4ecc88251 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.639200] env[63297]: DEBUG oslo_concurrency.lockutils [req-c45f1b21-bc5e-4b8c-ba3a-da2d4b9e3e81 req-19c222a7-6762-4f8c-a1dd-b3297d20bbad service nova] Releasing lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.680383] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697901, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.707882] env[63297]: DEBUG oslo_vmware.api [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697895, 'name': PowerOnVM_Task, 'duration_secs': 1.878172} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.707882] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1588.708267] env[63297]: INFO nova.compute.manager [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Took 11.05 seconds to spawn the instance on the hypervisor. [ 1588.708393] env[63297]: DEBUG nova.compute.manager [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1588.709287] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a11d70-e5bc-4369-b381-2a098ac5608d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.762797] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697902, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.919771] env[63297]: DEBUG nova.scheduler.client.report [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 119 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1588.919771] env[63297]: DEBUG nova.compute.provider_tree [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 119 to 120 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1588.919771] env[63297]: DEBUG nova.compute.provider_tree [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1588.925751] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e11a62-46b5-2621-79fb-73b0a8806453, 'name': SearchDatastore_Task, 'duration_secs': 0.022556} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.927437] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-184b4f1b-e4fb-4f45-9c4a-ec42c3c1ad76 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.933280] env[63297]: DEBUG nova.compute.utils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1588.939926] env[63297]: DEBUG nova.compute.manager [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1588.939926] env[63297]: DEBUG nova.network.neutron [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1588.940822] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1588.940822] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522d6437-f0e1-a122-8ad9-af5cb8ef90d6" [ 1588.940822] env[63297]: _type = "Task" [ 1588.940822] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.953483] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522d6437-f0e1-a122-8ad9-af5cb8ef90d6, 'name': SearchDatastore_Task, 'duration_secs': 0.00918} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.957066] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.957346] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6c1aa85a-ee37-461b-ad8a-7fbb525e836e/6c1aa85a-ee37-461b-ad8a-7fbb525e836e.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1588.957894] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697903, 'name': Rename_Task, 'duration_secs': 0.214071} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.958127] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53aaf475-d696-4f34-aef7-b5f2f50601ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.960785] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1588.961348] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19a0d794-fe0f-4da8-9767-bd88055927ba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.968478] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1588.968478] env[63297]: value = "task-1697904" [ 1588.968478] env[63297]: _type = "Task" [ 1588.968478] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.970220] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1588.970220] env[63297]: value = "task-1697905" [ 1588.970220] env[63297]: _type = "Task" [ 1588.970220] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.979517] env[63297]: DEBUG nova.policy [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c10cd2e8c79f4c3e9c6618b49c0659a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ffaef3dbc9114b15bd03c1fb6af708ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1588.983965] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697904, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.989964] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697905, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.147920] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1589.147920] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-aca559f8-52d7-4a16-bd37-8fade76bc96f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.154126] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1589.154126] env[63297]: value = "task-1697906" [ 1589.154126] env[63297]: _type = "Task" [ 1589.154126] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.168053] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697906, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.181382] env[63297]: DEBUG oslo_vmware.api [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697901, 'name': PowerOnVM_Task, 'duration_secs': 0.695909} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.181863] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1589.182263] env[63297]: INFO nova.compute.manager [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Took 15.92 seconds to spawn the instance on the hypervisor. [ 1589.182647] env[63297]: DEBUG nova.compute.manager [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1589.183653] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea1a8c6-540e-4bc4-bc29-6022de9baf75 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.233591] env[63297]: INFO nova.compute.manager [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Took 29.69 seconds to build instance. [ 1589.259737] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697902, 'name': ReconfigVM_Task, 'duration_secs': 0.517407} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.260223] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Reconfigured VM instance instance-00000056 to attach disk [datastore1] a10df0e9-4278-48f1-b111-864ac793f630/a10df0e9-4278-48f1-b111-864ac793f630.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1589.260733] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b512b071-a336-498b-b3f0-f6edcf99b2f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.269020] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1589.269020] env[63297]: value = "task-1697907" [ 1589.269020] env[63297]: _type = "Task" [ 1589.269020] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.280502] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697907, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.295989] env[63297]: DEBUG nova.network.neutron [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Successfully created port: 9498ba84-eb30-4513-a275-7c6726afd80b {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1589.430023] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.012s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.431321] env[63297]: DEBUG oslo_concurrency.lockutils [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.658s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.431770] env[63297]: DEBUG nova.objects.instance [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Lazy-loading 'resources' on Instance uuid be532612-7192-4771-a3dc-25bd1dc6be6b {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1589.444034] env[63297]: DEBUG nova.compute.manager [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1589.462090] env[63297]: INFO nova.scheduler.client.report [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Deleted allocations for instance ac112251-8cc3-4f57-8983-8a07e2a068f8 [ 1589.494751] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697905, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.500229] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697904, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.665368] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697906, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.705981] env[63297]: INFO nova.compute.manager [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Took 32.85 seconds to build instance. [ 1589.734509] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9145a417-8647-4625-b0b2-f52e664ae67f tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Lock "a0f4160e-cfb4-4d1d-bbee-6df44eb363fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.206s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.781366] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697907, 'name': Rename_Task, 'duration_secs': 0.281571} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.781818] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1589.781971] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31eef7ba-eaad-4fcd-90de-769478054cb1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.788541] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1589.788541] env[63297]: value = "task-1697908" [ 1589.788541] env[63297]: _type = "Task" [ 1589.788541] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.797926] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697908, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.973037] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f2f17528-7b2d-47da-89b3-3302a6dc9816 tempest-ServersTestManualDisk-383595178 tempest-ServersTestManualDisk-383595178-project-member] Lock "ac112251-8cc3-4f57-8983-8a07e2a068f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.674s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.985960] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697905, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.682868} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.991588] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6c1aa85a-ee37-461b-ad8a-7fbb525e836e/6c1aa85a-ee37-461b-ad8a-7fbb525e836e.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1589.991821] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1589.992131] env[63297]: DEBUG oslo_vmware.api [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1697904, 'name': PowerOnVM_Task, 'duration_secs': 0.974843} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.993205] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c16ef3c-7acd-4661-ab16-7377811b2fc8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.996712] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1589.997025] env[63297]: INFO nova.compute.manager [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Took 9.83 seconds to spawn the instance on the hypervisor. [ 1589.997266] env[63297]: DEBUG nova.compute.manager [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1589.998594] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb384912-655c-4cf5-8109-66ffc2336b46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.011925] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1590.011925] env[63297]: value = "task-1697909" [ 1590.011925] env[63297]: _type = "Task" [ 1590.011925] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.029191] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697909, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.173466] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697906, 'name': CreateSnapshot_Task, 'duration_secs': 0.724982} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.176450] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1590.177168] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4ae073-6894-43ff-8ab2-4e086a9012eb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.211456] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bdfe81ca-1508-4039-b604-c91459c33fa6 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "310cf8d4-613a-4c35-b118-7d79138e4799" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.366s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.300629] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697908, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.304571] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c27e89-7f19-41a8-88b9-cb5acd46be32 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.312019] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6feae2b2-74a1-486f-b43b-981daf7c19c8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.342145] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8107d80-b68f-476a-a332-5b9567216e94 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.352122] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4421fda-f340-4aa4-bc76-ce99db0daa74 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.368867] env[63297]: DEBUG nova.compute.provider_tree [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1590.431399] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Acquiring lock "a0f4160e-cfb4-4d1d-bbee-6df44eb363fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.431701] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Lock "a0f4160e-cfb4-4d1d-bbee-6df44eb363fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.431898] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Acquiring lock "a0f4160e-cfb4-4d1d-bbee-6df44eb363fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.432093] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Lock "a0f4160e-cfb4-4d1d-bbee-6df44eb363fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.432265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 
tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Lock "a0f4160e-cfb4-4d1d-bbee-6df44eb363fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.434672] env[63297]: INFO nova.compute.manager [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Terminating instance [ 1590.436466] env[63297]: DEBUG nova.compute.manager [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1590.436687] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1590.438120] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7de56b-7144-443f-aab6-7bf98da8d714 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.446359] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1590.446571] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e002047a-fa1f-4c9d-9788-3ab49e9158e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.454289] env[63297]: DEBUG nova.compute.manager [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1590.455746] env[63297]: DEBUG oslo_vmware.api [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Waiting for the task: (returnval){ [ 1590.455746] env[63297]: value = "task-1697910" [ 1590.455746] env[63297]: _type = "Task" [ 1590.455746] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.466029] env[63297]: DEBUG oslo_vmware.api [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697910, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.479598] env[63297]: DEBUG nova.virt.hardware [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1590.479598] env[63297]: DEBUG nova.virt.hardware [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1590.479598] env[63297]: DEBUG nova.virt.hardware [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1590.479813] env[63297]: DEBUG nova.virt.hardware [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1590.479902] env[63297]: DEBUG nova.virt.hardware [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1590.480611] env[63297]: DEBUG nova.virt.hardware [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1590.480914] env[63297]: DEBUG nova.virt.hardware [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1590.481070] env[63297]: DEBUG nova.virt.hardware [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1590.481247] env[63297]: DEBUG nova.virt.hardware [None 
req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1590.481448] env[63297]: DEBUG nova.virt.hardware [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1590.481678] env[63297]: DEBUG nova.virt.hardware [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1590.482660] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d6475a-1b2e-457f-81c9-6736fbf93f00 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.491075] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71940add-8229-4b90-8315-83ddf4b8c77e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.522394] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697909, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070259} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.522835] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1590.523432] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec664b4b-0a1f-4cfc-9fab-de244739f214 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.555241] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 6c1aa85a-ee37-461b-ad8a-7fbb525e836e/6c1aa85a-ee37-461b-ad8a-7fbb525e836e.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1590.555909] env[63297]: INFO nova.compute.manager [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Took 29.33 seconds to build instance. 
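The entries above trace oslo.vmware's task lifecycle: an asynchronous vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, CreateSnapshot_Task, ...) is started, wait_for_task (api.py:397) blocks while _poll_task (api.py:434/444) periodically reports "progress is N%", and the finished task is logged with its duration_secs. A minimal illustrative sketch of that polling loop follows; the get_task_info callable and its return shape are hypothetical stand-ins for the real oslo.vmware internals, not the library API.

    import time

    class TaskFailed(Exception):
        """Raised when the polled task ends in an error state."""

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # Poll a vCenter-style task until it finishes, mimicking the
        # progress/duration logging seen in the entries above.
        # get_task_info is assumed to return a dict such as
        # {'state': 'running', 'progress': 51} for the given task_ref.
        started = time.monotonic()
        while True:
            info = get_task_info(task_ref)
            if info['state'] == 'success':
                duration = time.monotonic() - started
                print(f"Task {task_ref} completed successfully in {duration:.3f}s")
                return info
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            print(f"Task {task_ref} progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)

The real driver wraps the same loop around SOAP property reads against the vCenter session, which is why every "Waiting for the task" block in the log is followed by one or more progress lines and a final duration_secs entry.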
[ 1590.556924] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb623fa2-4ca9-4b1c-8dc3-ed9afd05f0e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.580969] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1590.580969] env[63297]: value = "task-1697911" [ 1590.580969] env[63297]: _type = "Task" [ 1590.580969] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.590955] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697911, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.700799] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1590.701548] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c780b5f4-215a-4c80-a3ef-dee3a02d1dbf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.710176] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1590.710176] env[63297]: value = "task-1697912" [ 1590.710176] env[63297]: _type = "Task" [ 1590.710176] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.721488] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697912, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.725824] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "310cf8d4-613a-4c35-b118-7d79138e4799" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.726086] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "310cf8d4-613a-4c35-b118-7d79138e4799" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.726572] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "310cf8d4-613a-4c35-b118-7d79138e4799-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.726572] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "310cf8d4-613a-4c35-b118-7d79138e4799-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.727481] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "310cf8d4-613a-4c35-b118-7d79138e4799-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.729419] env[63297]: INFO nova.compute.manager [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Terminating instance [ 1590.731054] env[63297]: DEBUG nova.compute.manager [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1590.731054] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1590.731910] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27e4011-e733-4c97-a07e-497e163ba92c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.739383] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1590.739691] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ca66cf8-a683-43c0-bb09-f675484a4eed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.747335] env[63297]: DEBUG oslo_vmware.api [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1590.747335] env[63297]: value = "task-1697913" [ 1590.747335] env[63297]: _type = "Task" [ 1590.747335] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.758576] env[63297]: DEBUG oslo_vmware.api [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697913, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.808034] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697908, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.875167] env[63297]: DEBUG nova.scheduler.client.report [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1590.973476] env[63297]: DEBUG oslo_vmware.api [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697910, 'name': PowerOffVM_Task, 'duration_secs': 0.366817} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.973476] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1590.973476] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1590.973476] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-842e0633-439c-4e5f-9a48-2e8c810fc94e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.011182] env[63297]: DEBUG nova.compute.manager [req-acbbf7b2-8049-4610-971f-674a2f39d055 req-70932701-0206-4428-b3a6-a753803e63e1 service nova] [instance: cc644ecc-7340-421c-b966-19145eb82949] Received event network-vif-plugged-9498ba84-eb30-4513-a275-7c6726afd80b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1591.011397] env[63297]: DEBUG oslo_concurrency.lockutils [req-acbbf7b2-8049-4610-971f-674a2f39d055 req-70932701-0206-4428-b3a6-a753803e63e1 service nova] Acquiring lock "cc644ecc-7340-421c-b966-19145eb82949-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.011472] env[63297]: DEBUG oslo_concurrency.lockutils [req-acbbf7b2-8049-4610-971f-674a2f39d055 req-70932701-0206-4428-b3a6-a753803e63e1 service nova] Lock "cc644ecc-7340-421c-b966-19145eb82949-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.011831] env[63297]: DEBUG oslo_concurrency.lockutils [req-acbbf7b2-8049-4610-971f-674a2f39d055 req-70932701-0206-4428-b3a6-a753803e63e1 service nova] Lock "cc644ecc-7340-421c-b966-19145eb82949-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.011831] env[63297]: DEBUG nova.compute.manager [req-acbbf7b2-8049-4610-971f-674a2f39d055 req-70932701-0206-4428-b3a6-a753803e63e1 service nova] [instance: cc644ecc-7340-421c-b966-19145eb82949] No waiting events found dispatching network-vif-plugged-9498ba84-eb30-4513-a275-7c6726afd80b {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1591.011944] env[63297]: WARNING nova.compute.manager [req-acbbf7b2-8049-4610-971f-674a2f39d055 req-70932701-0206-4428-b3a6-a753803e63e1 service nova] [instance: cc644ecc-7340-421c-b966-19145eb82949] Received unexpected event network-vif-plugged-9498ba84-eb30-4513-a275-7c6726afd80b for instance with vm_state building and task_state spawning. 
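Throughout these entries oslo_concurrency.lockutils emits the same trace pattern around critical sections: "Acquiring lock X by Y", then "Lock X acquired ... waited N s", and finally a release line reporting how long the lock was held (as with "compute_resources" for the resource tracker and the per-instance "-events" locks for event dispatch). A rough sketch of that usage is below, assuming illustrative function bodies rather than the actual Nova code paths; only the lockutils calls themselves are the real library interface.

    from oslo_concurrency import lockutils

    # Serialize per-host resource accounting the way the resource tracker
    # does; the lock name mirrors the "compute_resources" lock in the log,
    # the body is illustrative only.
    @lockutils.synchronized('compute_resources')
    def update_usage(tracker, instance):
        tracker['instances'][instance['uuid']] = instance['flavor']

    # The explicit context-manager form, as used for the short-lived
    # "<instance-uuid>-events" locks when popping queued external events.
    def clear_events_for_instance(events, instance_uuid):
        with lockutils.lock(f'{instance_uuid}-events'):
            return events.pop(instance_uuid, [])

The waited/held timings in the log come from the wrapper that these helpers install around the decorated or wrapped block, which is why every acquire line is eventually paired with a matching "released ... held N s" entry.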
[ 1591.074502] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0fa9195d-f439-4af2-ba5b-8b02ce3c9f20 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.866s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.088849] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1591.089423] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1591.089423] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Deleting the datastore file [datastore1] a0f4160e-cfb4-4d1d-bbee-6df44eb363fb {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1591.093953] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4800aab1-26a7-4c16-a8ac-080919da95fd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.096754] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697911, 'name': ReconfigVM_Task, 'duration_secs': 0.344766} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.097064] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 6c1aa85a-ee37-461b-ad8a-7fbb525e836e/6c1aa85a-ee37-461b-ad8a-7fbb525e836e.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1591.098324] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e486d50b-1ca0-443d-a22f-37c1d449c879 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.102313] env[63297]: DEBUG oslo_vmware.api [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Waiting for the task: (returnval){ [ 1591.102313] env[63297]: value = "task-1697915" [ 1591.102313] env[63297]: _type = "Task" [ 1591.102313] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.107671] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1591.107671] env[63297]: value = "task-1697916" [ 1591.107671] env[63297]: _type = "Task" [ 1591.107671] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.113876] env[63297]: DEBUG oslo_vmware.api [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.118663] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697916, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.160326] env[63297]: DEBUG nova.network.neutron [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Successfully updated port: 9498ba84-eb30-4513-a275-7c6726afd80b {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1591.220086] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697912, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.257865] env[63297]: DEBUG oslo_vmware.api [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697913, 'name': PowerOffVM_Task, 'duration_secs': 0.260716} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.258101] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1591.258313] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1591.258575] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c508b63a-7c2d-48f4-92df-bda67f7ed4b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.303392] env[63297]: DEBUG oslo_vmware.api [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697908, 'name': PowerOnVM_Task, 'duration_secs': 1.053568} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.303669] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1591.303874] env[63297]: DEBUG nova.compute.manager [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1591.306032] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7879d7d6-e964-4446-9f9a-13f08870801e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.371054] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1591.371054] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1591.371054] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleting the datastore file [datastore1] 310cf8d4-613a-4c35-b118-7d79138e4799 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1591.371054] env[63297]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8cf3327-e6cb-413b-a900-f46bc9cc31ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.377954] env[63297]: DEBUG oslo_vmware.api [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1591.377954] env[63297]: value = "task-1697918" [ 1591.377954] env[63297]: _type = "Task" [ 1591.377954] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.383302] env[63297]: DEBUG oslo_concurrency.lockutils [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.952s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.389033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.135s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.389777] env[63297]: INFO nova.compute.claims [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1591.403030] env[63297]: DEBUG oslo_vmware.api [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697918, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.420057] env[63297]: INFO nova.scheduler.client.report [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Deleted allocations for instance be532612-7192-4771-a3dc-25bd1dc6be6b [ 1591.615978] env[63297]: DEBUG oslo_vmware.api [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Task: {'id': task-1697915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13818} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.616736] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1591.616876] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1591.617099] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1591.617232] env[63297]: INFO nova.compute.manager [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1591.617682] env[63297]: DEBUG oslo.service.loopingcall [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1591.617682] env[63297]: DEBUG nova.compute.manager [-] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1591.617800] env[63297]: DEBUG nova.network.neutron [-] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1591.623103] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697916, 'name': Rename_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.668836] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquiring lock "refresh_cache-cc644ecc-7340-421c-b966-19145eb82949" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.669012] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquired lock "refresh_cache-cc644ecc-7340-421c-b966-19145eb82949" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.669269] env[63297]: DEBUG nova.network.neutron [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1591.689038] env[63297]: DEBUG nova.compute.manager [req-6efcfe78-292c-45b8-afd8-53c732a8300c req-7a30f22b-e51c-425f-b243-357b65240aca service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Received event network-changed-8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1591.689435] env[63297]: DEBUG nova.compute.manager [req-6efcfe78-292c-45b8-afd8-53c732a8300c req-7a30f22b-e51c-425f-b243-357b65240aca service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Refreshing instance network info cache due to event network-changed-8f272d86-3373-42d6-8f0d-94e83e8e6b2c. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1591.689823] env[63297]: DEBUG oslo_concurrency.lockutils [req-6efcfe78-292c-45b8-afd8-53c732a8300c req-7a30f22b-e51c-425f-b243-357b65240aca service nova] Acquiring lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.690131] env[63297]: DEBUG oslo_concurrency.lockutils [req-6efcfe78-292c-45b8-afd8-53c732a8300c req-7a30f22b-e51c-425f-b243-357b65240aca service nova] Acquired lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.690423] env[63297]: DEBUG nova.network.neutron [req-6efcfe78-292c-45b8-afd8-53c732a8300c req-7a30f22b-e51c-425f-b243-357b65240aca service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Refreshing network info cache for port 8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1591.722140] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697912, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.828024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.893227] env[63297]: DEBUG oslo_vmware.api [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697918, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139399} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.893528] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1591.893666] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1591.893839] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1591.894052] env[63297]: INFO nova.compute.manager [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1591.894323] env[63297]: DEBUG oslo.service.loopingcall [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1591.894531] env[63297]: DEBUG nova.compute.manager [-] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1591.894670] env[63297]: DEBUG nova.network.neutron [-] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1591.935038] env[63297]: DEBUG oslo_concurrency.lockutils [None req-95ebc5f4-3b9e-49f4-8e22-73c8051f2872 tempest-ServerDiagnosticsTest-1743182437 tempest-ServerDiagnosticsTest-1743182437-project-member] Lock "be532612-7192-4771-a3dc-25bd1dc6be6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.174s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.124630] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697916, 'name': Rename_Task, 'duration_secs': 0.912143} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.124630] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1592.124630] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9fc119f-fcad-4b00-a73c-0288986ad9e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.133089] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1592.133089] env[63297]: value = "task-1697919" [ 1592.133089] env[63297]: _type = "Task" [ 1592.133089] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.142650] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697919, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.232598] env[63297]: DEBUG nova.network.neutron [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1592.234799] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697912, 'name': CloneVM_Task, 'duration_secs': 1.451982} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.234985] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Created linked-clone VM from snapshot [ 1592.236211] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a83d67e-442d-45e0-ab2f-8bd72cf31776 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.248655] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Uploading image 6e1de458-527b-4161-a12d-2d2d0f5efddb {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1592.277582] env[63297]: DEBUG oslo_vmware.rw_handles [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1592.277582] env[63297]: value = "vm-353968" [ 1592.277582] env[63297]: _type = "VirtualMachine" [ 1592.277582] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1592.279170] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-80163505-7345-4022-8f9a-de6726fdd4ef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.288093] env[63297]: DEBUG oslo_vmware.rw_handles [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lease: (returnval){ [ 1592.288093] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dbdcdf-34a7-3ad0-12ce-bda6d7c3a00f" [ 1592.288093] env[63297]: _type = "HttpNfcLease" [ 1592.288093] env[63297]: } obtained for exporting VM: (result){ [ 1592.288093] env[63297]: value = "vm-353968" [ 1592.288093] env[63297]: _type = "VirtualMachine" [ 1592.288093] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1592.289843] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the lease: (returnval){ [ 1592.289843] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dbdcdf-34a7-3ad0-12ce-bda6d7c3a00f" [ 1592.289843] env[63297]: _type = "HttpNfcLease" [ 1592.289843] env[63297]: } to be ready. 
{{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1592.298781] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1592.298781] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dbdcdf-34a7-3ad0-12ce-bda6d7c3a00f" [ 1592.298781] env[63297]: _type = "HttpNfcLease" [ 1592.298781] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1592.504569] env[63297]: DEBUG nova.network.neutron [-] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.641737] env[63297]: DEBUG nova.network.neutron [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Updating instance_info_cache with network_info: [{"id": "9498ba84-eb30-4513-a275-7c6726afd80b", "address": "fa:16:3e:1f:45:8b", "network": {"id": "56f6b69a-4250-4ac9-9ac5-fe2ec3ac63ae", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-342976745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ffaef3dbc9114b15bd03c1fb6af708ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9498ba84-eb", "ovs_interfaceid": "9498ba84-eb30-4513-a275-7c6726afd80b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.648954] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697919, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.809714] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1592.809714] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dbdcdf-34a7-3ad0-12ce-bda6d7c3a00f" [ 1592.809714] env[63297]: _type = "HttpNfcLease" [ 1592.809714] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1592.813113] env[63297]: DEBUG oslo_vmware.rw_handles [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1592.813113] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52dbdcdf-34a7-3ad0-12ce-bda6d7c3a00f" [ 1592.813113] env[63297]: _type = "HttpNfcLease" [ 1592.813113] env[63297]: }. 
{{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1592.814614] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bf5b2e-1d08-448b-b8f8-7481a66e1b0c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.824533] env[63297]: DEBUG oslo_vmware.rw_handles [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a5093a-174e-a5ae-192c-4db824163944/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1592.824733] env[63297]: DEBUG oslo_vmware.rw_handles [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a5093a-174e-a5ae-192c-4db824163944/disk-0.vmdk for reading. {{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1592.900191] env[63297]: DEBUG nova.network.neutron [-] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.903487] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aeea358-1e5d-4475-8884-a77169247799 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.914993] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6c4fb1-e721-488a-85df-44efa4ca945e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.919925] env[63297]: DEBUG nova.network.neutron [req-6efcfe78-292c-45b8-afd8-53c732a8300c req-7a30f22b-e51c-425f-b243-357b65240aca service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updated VIF entry in instance network info cache for port 8f272d86-3373-42d6-8f0d-94e83e8e6b2c. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1592.920298] env[63297]: DEBUG nova.network.neutron [req-6efcfe78-292c-45b8-afd8-53c732a8300c req-7a30f22b-e51c-425f-b243-357b65240aca service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updating instance_info_cache with network_info: [{"id": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "address": "fa:16:3e:6a:dc:37", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f272d86-33", "ovs_interfaceid": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.956563] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0a6148-a907-43d2-a253-2b769671cb3c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.969302] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-36e79c15-97b2-468c-872d-362a03e1fc95 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.973753] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5047e98d-855a-43be-a8ef-b85f0465b7bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.992262] env[63297]: DEBUG nova.compute.provider_tree [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1593.014047] env[63297]: INFO nova.compute.manager [-] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Took 1.40 seconds to deallocate network for instance. 
[ 1593.067876] env[63297]: DEBUG nova.compute.manager [req-7e792bad-9e65-4511-afe7-b1e9db3076fc req-ee103f3d-7557-43b5-90ec-2caf59318800 service nova] [instance: cc644ecc-7340-421c-b966-19145eb82949] Received event network-changed-9498ba84-eb30-4513-a275-7c6726afd80b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1593.068096] env[63297]: DEBUG nova.compute.manager [req-7e792bad-9e65-4511-afe7-b1e9db3076fc req-ee103f3d-7557-43b5-90ec-2caf59318800 service nova] [instance: cc644ecc-7340-421c-b966-19145eb82949] Refreshing instance network info cache due to event network-changed-9498ba84-eb30-4513-a275-7c6726afd80b. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1593.068324] env[63297]: DEBUG oslo_concurrency.lockutils [req-7e792bad-9e65-4511-afe7-b1e9db3076fc req-ee103f3d-7557-43b5-90ec-2caf59318800 service nova] Acquiring lock "refresh_cache-cc644ecc-7340-421c-b966-19145eb82949" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.145963] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Releasing lock "refresh_cache-cc644ecc-7340-421c-b966-19145eb82949" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.148502] env[63297]: DEBUG nova.compute.manager [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Instance network_info: |[{"id": "9498ba84-eb30-4513-a275-7c6726afd80b", "address": "fa:16:3e:1f:45:8b", "network": {"id": "56f6b69a-4250-4ac9-9ac5-fe2ec3ac63ae", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-342976745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ffaef3dbc9114b15bd03c1fb6af708ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9498ba84-eb", "ovs_interfaceid": "9498ba84-eb30-4513-a275-7c6726afd80b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1593.148502] env[63297]: DEBUG oslo_vmware.api [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697919, 'name': PowerOnVM_Task, 'duration_secs': 0.646952} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.148502] env[63297]: DEBUG oslo_concurrency.lockutils [req-7e792bad-9e65-4511-afe7-b1e9db3076fc req-ee103f3d-7557-43b5-90ec-2caf59318800 service nova] Acquired lock "refresh_cache-cc644ecc-7340-421c-b966-19145eb82949" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.148502] env[63297]: DEBUG nova.network.neutron [req-7e792bad-9e65-4511-afe7-b1e9db3076fc req-ee103f3d-7557-43b5-90ec-2caf59318800 service nova] [instance: cc644ecc-7340-421c-b966-19145eb82949] Refreshing network info cache for port 9498ba84-eb30-4513-a275-7c6726afd80b {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1593.149101] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:45:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e41c97-4d75-4041-ae71-321e7e9d480b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9498ba84-eb30-4513-a275-7c6726afd80b', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1593.156798] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Creating folder: Project (ffaef3dbc9114b15bd03c1fb6af708ea). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1593.157165] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1593.157454] env[63297]: INFO nova.compute.manager [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Took 8.28 seconds to spawn the instance on the hypervisor. 
[ 1593.157583] env[63297]: DEBUG nova.compute.manager [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1593.160400] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65e3a587-323d-437b-853e-9eeddebdaddf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.162526] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b546b397-3ef5-4b37-bda1-fa33bde864b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.178710] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Created folder: Project (ffaef3dbc9114b15bd03c1fb6af708ea) in parent group-v353718. [ 1593.178941] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Creating folder: Instances. Parent ref: group-v353969. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1593.179245] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6ca0846-3a33-414d-b59e-a11417fedac3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.191959] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Created folder: Instances in parent group-v353969. [ 1593.191959] env[63297]: DEBUG oslo.service.loopingcall [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1593.193224] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc644ecc-7340-421c-b966-19145eb82949] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1593.195091] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3525900e-eaed-4fde-9c26-c1261168ae49 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.219874] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1593.219874] env[63297]: value = "task-1697923" [ 1593.219874] env[63297]: _type = "Task" [ 1593.219874] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.230358] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697923, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.313463] env[63297]: DEBUG oslo_concurrency.lockutils [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "a10df0e9-4278-48f1-b111-864ac793f630" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.313791] env[63297]: DEBUG oslo_concurrency.lockutils [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a10df0e9-4278-48f1-b111-864ac793f630" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.314011] env[63297]: DEBUG oslo_concurrency.lockutils [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "a10df0e9-4278-48f1-b111-864ac793f630-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.314211] env[63297]: DEBUG oslo_concurrency.lockutils [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a10df0e9-4278-48f1-b111-864ac793f630-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.314382] env[63297]: DEBUG oslo_concurrency.lockutils [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a10df0e9-4278-48f1-b111-864ac793f630-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.321022] env[63297]: INFO nova.compute.manager [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Terminating instance [ 1593.321170] env[63297]: DEBUG nova.compute.manager [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1593.321403] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1593.322326] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38399e96-d641-43b8-9d1a-480a4a447a1d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.332166] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1593.332360] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df43aa68-4759-4fe4-9b84-d68579b6d2b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.342927] env[63297]: DEBUG oslo_vmware.api [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1593.342927] env[63297]: value = "task-1697924" [ 1593.342927] env[63297]: _type = "Task" [ 1593.342927] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.354300] env[63297]: DEBUG oslo_vmware.api [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697924, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.408644] env[63297]: INFO nova.compute.manager [-] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Took 1.51 seconds to deallocate network for instance. 
[ 1593.423951] env[63297]: DEBUG oslo_concurrency.lockutils [req-6efcfe78-292c-45b8-afd8-53c732a8300c req-7a30f22b-e51c-425f-b243-357b65240aca service nova] Releasing lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.522469] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.538583] env[63297]: DEBUG nova.scheduler.client.report [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1593.543650] env[63297]: DEBUG nova.compute.provider_tree [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 120 to 121 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1593.543650] env[63297]: DEBUG nova.compute.provider_tree [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1593.686199] env[63297]: INFO nova.compute.manager [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Took 26.14 seconds to build instance. [ 1593.731531] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697923, 'name': CreateVM_Task, 'duration_secs': 0.450601} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.731861] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc644ecc-7340-421c-b966-19145eb82949] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1593.732684] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.732864] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.733447] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1593.734193] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-175bd969-b2a8-45c2-98a3-50be440b8a2f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.740629] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Waiting for the task: (returnval){ [ 1593.740629] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5218dd2b-00e6-9e1b-62b0-127c10d3c29d" [ 1593.740629] env[63297]: _type = "Task" [ 1593.740629] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.750657] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5218dd2b-00e6-9e1b-62b0-127c10d3c29d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.857768] env[63297]: DEBUG oslo_vmware.api [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697924, 'name': PowerOffVM_Task, 'duration_secs': 0.243723} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.858227] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1593.858463] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1593.859077] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d32a80d-2342-4ee3-81c5-24118ac74348 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.919478] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.999309] env[63297]: DEBUG nova.network.neutron [req-7e792bad-9e65-4511-afe7-b1e9db3076fc req-ee103f3d-7557-43b5-90ec-2caf59318800 service nova] [instance: cc644ecc-7340-421c-b966-19145eb82949] Updated VIF entry in instance network info cache for port 9498ba84-eb30-4513-a275-7c6726afd80b. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1593.999309] env[63297]: DEBUG nova.network.neutron [req-7e792bad-9e65-4511-afe7-b1e9db3076fc req-ee103f3d-7557-43b5-90ec-2caf59318800 service nova] [instance: cc644ecc-7340-421c-b966-19145eb82949] Updating instance_info_cache with network_info: [{"id": "9498ba84-eb30-4513-a275-7c6726afd80b", "address": "fa:16:3e:1f:45:8b", "network": {"id": "56f6b69a-4250-4ac9-9ac5-fe2ec3ac63ae", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-342976745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ffaef3dbc9114b15bd03c1fb6af708ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9498ba84-eb", "ovs_interfaceid": "9498ba84-eb30-4513-a275-7c6726afd80b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.004319] env[63297]: DEBUG nova.compute.manager [req-2b77b4e1-dc65-4022-91a6-47cc0823c1ee req-f6bc393d-40ef-4497-8e81-1801404b15a2 service nova] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Received event network-vif-deleted-6965237d-5263-4cc5-b1ac-d89a3ac02360 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1594.049639] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.050466] env[63297]: DEBUG nova.compute.manager [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1594.055238] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.411s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.055410] env[63297]: DEBUG nova.objects.instance [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lazy-loading 'resources' on Instance uuid 020b06c5-44e2-4f74-a1dc-d7557db3537e {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1594.061885] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1594.062154] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1594.062295] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleting the datastore file [datastore1] a10df0e9-4278-48f1-b111-864ac793f630 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1594.062620] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b9eac85-8ddc-4882-8e8d-546a97499293 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.075123] env[63297]: DEBUG oslo_vmware.api [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1594.075123] env[63297]: value = "task-1697926" [ 1594.075123] env[63297]: _type = "Task" [ 1594.075123] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.087709] env[63297]: DEBUG oslo_vmware.api [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697926, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.144774] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "a6d86e78-ae24-4e70-9fb2-270177b40322" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.144912] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a6d86e78-ae24-4e70-9fb2-270177b40322" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.161837] env[63297]: INFO nova.compute.manager [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Rescuing [ 1594.162265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquiring lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.162513] env[63297]: DEBUG oslo_concurrency.lockutils [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquired lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.162829] env[63297]: DEBUG nova.network.neutron [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1594.188521] env[63297]: DEBUG oslo_concurrency.lockutils [None req-03f24f19-e3bb-4615-b38f-a4ac5b49e68f tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Lock "6c1aa85a-ee37-461b-ad8a-7fbb525e836e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.668s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.255383] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5218dd2b-00e6-9e1b-62b0-127c10d3c29d, 'name': SearchDatastore_Task, 'duration_secs': 0.014599} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.255903] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.256198] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1594.256453] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.256600] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.256775] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1594.257142] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccaf6c8f-3f1d-49b5-8792-346b75da0a81 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.280359] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1594.283185] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1594.283185] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db49dd02-38d1-4f98-8b47-6caed07198b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.291350] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Waiting for the task: (returnval){ [ 1594.291350] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]525546fd-d9f1-1e6f-c282-cad3ac340514" [ 1594.291350] env[63297]: _type = "Task" [ 1594.291350] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.301619] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525546fd-d9f1-1e6f-c282-cad3ac340514, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.501702] env[63297]: DEBUG oslo_concurrency.lockutils [req-7e792bad-9e65-4511-afe7-b1e9db3076fc req-ee103f3d-7557-43b5-90ec-2caf59318800 service nova] Releasing lock "refresh_cache-cc644ecc-7340-421c-b966-19145eb82949" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.502010] env[63297]: DEBUG nova.compute.manager [req-7e792bad-9e65-4511-afe7-b1e9db3076fc req-ee103f3d-7557-43b5-90ec-2caf59318800 service nova] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Received event network-vif-deleted-ec16ec14-bcee-41ea-a7de-02e85d2b1169 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1594.560155] env[63297]: DEBUG nova.compute.utils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1594.562515] env[63297]: DEBUG nova.objects.instance [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lazy-loading 'numa_topology' on Instance uuid 020b06c5-44e2-4f74-a1dc-d7557db3537e {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1594.564623] env[63297]: DEBUG nova.compute.manager [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1594.564623] env[63297]: DEBUG nova.network.neutron [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1594.597766] env[63297]: DEBUG oslo_vmware.api [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.384447} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.598178] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1594.598386] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1594.598561] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1594.598732] env[63297]: INFO nova.compute.manager [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1594.599034] env[63297]: DEBUG oslo.service.loopingcall [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1594.599233] env[63297]: DEBUG nova.compute.manager [-] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1594.599333] env[63297]: DEBUG nova.network.neutron [-] [instance: a10df0e9-4278-48f1-b111-864ac793f630] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1594.649679] env[63297]: DEBUG nova.compute.manager [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1594.664968] env[63297]: DEBUG nova.policy [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20a91144677b4efba8ab91acd53d1c04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c33733e0599840618625ecb3e6bb6029', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1594.690130] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.690405] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.808150] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525546fd-d9f1-1e6f-c282-cad3ac340514, 'name': SearchDatastore_Task, 'duration_secs': 0.022883} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.809172] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e9c2199-4f5c-4957-bf33-e8fcd909cd35 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.815859] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Waiting for the task: (returnval){ [ 1594.815859] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]523846a5-ad7e-c8d3-fa21-86996735d101" [ 1594.815859] env[63297]: _type = "Task" [ 1594.815859] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.826381] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523846a5-ad7e-c8d3-fa21-86996735d101, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.005533] env[63297]: DEBUG nova.network.neutron [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updating instance_info_cache with network_info: [{"id": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "address": "fa:16:3e:b9:69:22", "network": {"id": "b20022f3-067f-42e6-9029-1f69f6657c27", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-894113652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e843a2460c21477ca894b3e2846ec98d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7f6ca3-5c", "ovs_interfaceid": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.064774] env[63297]: DEBUG nova.compute.manager [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1595.071773] env[63297]: DEBUG nova.objects.base [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Object Instance<020b06c5-44e2-4f74-a1dc-d7557db3537e> lazy-loaded attributes: resources,numa_topology {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1595.116798] env[63297]: DEBUG nova.compute.manager [req-d7a65b13-1127-4079-a8ec-02c759c4ee15 req-304d0d5c-7b10-45f2-9242-49424bae50d2 service nova] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Received event network-vif-deleted-1fc6d6cc-328f-4608-becc-2ab8cce8dc98 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1595.117030] env[63297]: INFO nova.compute.manager [req-d7a65b13-1127-4079-a8ec-02c759c4ee15 req-304d0d5c-7b10-45f2-9242-49424bae50d2 service nova] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Neutron deleted interface 1fc6d6cc-328f-4608-becc-2ab8cce8dc98; detaching it from the instance and deleting it from the info cache [ 1595.117252] env[63297]: DEBUG nova.network.neutron [req-d7a65b13-1127-4079-a8ec-02c759c4ee15 req-304d0d5c-7b10-45f2-9242-49424bae50d2 service nova] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.174404] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.193424] env[63297]: DEBUG nova.compute.manager [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1595.335021] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523846a5-ad7e-c8d3-fa21-86996735d101, 'name': SearchDatastore_Task, 'duration_secs': 0.013861} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.335021] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.335021] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] cc644ecc-7340-421c-b966-19145eb82949/cc644ecc-7340-421c-b966-19145eb82949.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1595.335021] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5dd5436-01f3-4f27-a8c0-14c3f612339f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.343840] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Waiting for the task: (returnval){ [ 1595.343840] env[63297]: value = "task-1697927" [ 1595.343840] env[63297]: _type = "Task" [ 1595.343840] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.358629] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697927, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.464805] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b78b442-8f26-4853-ad74-6f06a538ff7d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.477016] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca1ddbb-cba9-4a0f-8492-5089f4443b54 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.514531] env[63297]: DEBUG oslo_concurrency.lockutils [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Releasing lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.518141] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f9a9ea-0a65-49ca-a565-e25192e6ddcc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.527627] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bda8b77-7d99-4049-91bc-970a8a64fb9e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.548315] env[63297]: DEBUG nova.compute.provider_tree [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1595.583565] env[63297]: DEBUG nova.network.neutron [-] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.583565] env[63297]: DEBUG nova.network.neutron [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Successfully created port: 6b9e7381-f512-4fe3-9eb0-f334dbb61211 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1595.624749] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96fbc30c-27ce-46f8-a2a5-b0ee6eea1110 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.636064] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38eff4fc-0928-49bd-9a5f-e600241acda8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.684952] env[63297]: DEBUG nova.compute.manager [req-d7a65b13-1127-4079-a8ec-02c759c4ee15 req-304d0d5c-7b10-45f2-9242-49424bae50d2 service nova] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Detach interface failed, port_id=1fc6d6cc-328f-4608-becc-2ab8cce8dc98, reason: Instance a10df0e9-4278-48f1-b111-864ac793f630 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1595.718864] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.854784] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697927, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.054898] env[63297]: DEBUG nova.scheduler.client.report [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1596.063016] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1596.063328] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe072eff-f910-4edd-9121-93c3273e858e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.071801] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1596.071801] env[63297]: value = "task-1697928" [ 1596.071801] env[63297]: _type = "Task" [ 1596.071801] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.085729] env[63297]: INFO nova.compute.manager [-] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Took 1.49 seconds to deallocate network for instance. [ 1596.087824] env[63297]: DEBUG nova.compute.manager [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1596.089725] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697928, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.125428] env[63297]: DEBUG nova.virt.hardware [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1596.125684] env[63297]: DEBUG nova.virt.hardware [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1596.125837] env[63297]: DEBUG nova.virt.hardware [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1596.126020] env[63297]: DEBUG nova.virt.hardware [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1596.126192] env[63297]: DEBUG nova.virt.hardware [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1596.126352] env[63297]: DEBUG nova.virt.hardware [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1596.126571] env[63297]: DEBUG nova.virt.hardware [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1596.126725] env[63297]: DEBUG nova.virt.hardware [None 
req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1596.128152] env[63297]: DEBUG nova.virt.hardware [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1596.128152] env[63297]: DEBUG nova.virt.hardware [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1596.128152] env[63297]: DEBUG nova.virt.hardware [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1596.128337] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a2297e-3d75-4657-817e-34dcee23d4f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.138107] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7f64c6-6a10-4c20-a49d-d1891c134a52 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.358374] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697927, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531596} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.358716] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] cc644ecc-7340-421c-b966-19145eb82949/cc644ecc-7340-421c-b966-19145eb82949.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1596.358997] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1596.359373] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a627fa8-3fdb-4f73-ac34-0de01dc3e7be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.366367] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Waiting for the task: (returnval){ [ 1596.366367] env[63297]: value = "task-1697929" [ 1596.366367] env[63297]: _type = "Task" [ 1596.366367] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.375050] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697929, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.402077] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.402321] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.563307] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.508s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.569978] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.742s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.569978] env[63297]: DEBUG nova.objects.instance [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63297) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1596.597763] env[63297]: DEBUG oslo_concurrency.lockutils [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.600103] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697928, 'name': PowerOffVM_Task, 'duration_secs': 0.20788} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.600622] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1596.601624] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808b8253-9fc9-4efa-8410-0bf35e7ac532 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.659561] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd73446-5437-42bc-a7cc-37814e58e04f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.712184] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1596.712573] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56bdd676-29b0-4378-ab3c-a8b7729e9070 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.723125] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1596.723125] env[63297]: value = "task-1697930" [ 1596.723125] env[63297]: _type = "Task" [ 1596.723125] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.735884] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1596.735884] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1596.735884] env[63297]: DEBUG oslo_concurrency.lockutils [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.735884] env[63297]: DEBUG oslo_concurrency.lockutils [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1596.735884] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1596.735884] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac1c999f-0af8-46f2-9071-1d57e1eb7cd9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.746027] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1596.746027] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1596.746027] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6ec20a0-07a4-4ce9-b7bf-3a2775f32b97 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.751139] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1596.751139] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520be1ef-104e-5b36-cd11-ed64c6e4c284" [ 1596.751139] env[63297]: _type = "Task" [ 1596.751139] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.759328] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520be1ef-104e-5b36-cd11-ed64c6e4c284, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.876974] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697929, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091683} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.877312] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1596.878202] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a2627d-0df7-4267-95f9-31e8b6b8bd66 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.901498] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] cc644ecc-7340-421c-b966-19145eb82949/cc644ecc-7340-421c-b966-19145eb82949.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1596.901990] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-498cf71b-732a-4562-8621-4e0e18edd9a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.925997] env[63297]: DEBUG nova.compute.utils [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1596.945945] env[63297]: DEBUG oslo_vmware.api [None 
req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Waiting for the task: (returnval){ [ 1596.945945] env[63297]: value = "task-1697931" [ 1596.945945] env[63297]: _type = "Task" [ 1596.945945] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.945945] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697931, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.089095] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aeadce50-d4d0-4491-a5d0-6c1a09d495e3 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "020b06c5-44e2-4f74-a1dc-d7557db3537e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 34.761s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.092818] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "020b06c5-44e2-4f74-a1dc-d7557db3537e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 12.250s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.092818] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "020b06c5-44e2-4f74-a1dc-d7557db3537e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.092818] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "020b06c5-44e2-4f74-a1dc-d7557db3537e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.092818] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "020b06c5-44e2-4f74-a1dc-d7557db3537e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.093839] env[63297]: INFO nova.compute.manager [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Terminating instance [ 1597.099829] env[63297]: DEBUG nova.compute.manager [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 
020b06c5-44e2-4f74-a1dc-d7557db3537e] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1597.100119] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1597.100822] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2910d97-5d33-4445-a1ef-68c9fa4eefe9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.115471] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b13474-40b3-48a9-b23e-a4246ee71552 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.159719] env[63297]: WARNING nova.virt.vmwareapi.vmops [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 020b06c5-44e2-4f74-a1dc-d7557db3537e could not be found. [ 1597.160141] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1597.160223] env[63297]: INFO nova.compute.manager [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1597.160554] env[63297]: DEBUG oslo.service.loopingcall [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1597.160873] env[63297]: DEBUG nova.compute.manager [-] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1597.160976] env[63297]: DEBUG nova.network.neutron [-] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1597.264844] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520be1ef-104e-5b36-cd11-ed64c6e4c284, 'name': SearchDatastore_Task, 'duration_secs': 0.009778} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.267294] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a41ffc8-c8e2-491c-aa89-aa17bafcb864 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.276723] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1597.276723] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5210c6bf-4157-76d6-acc5-224dd4610494" [ 1597.276723] env[63297]: _type = "Task" [ 1597.276723] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.287583] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5210c6bf-4157-76d6-acc5-224dd4610494, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.430249] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.028s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.449376] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697931, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.584160] env[63297]: DEBUG oslo_concurrency.lockutils [None req-23aab3e8-4d0c-45bc-ba7c-736f4fbcc14f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.586428] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.064s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.615960] env[63297]: DEBUG nova.objects.instance [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Lazy-loading 'resources' on Instance uuid a0f4160e-cfb4-4d1d-bbee-6df44eb363fb {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1597.759027] env[63297]: DEBUG nova.network.neutron [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Successfully updated port: 6b9e7381-f512-4fe3-9eb0-f334dbb61211 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1597.790156] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5210c6bf-4157-76d6-acc5-224dd4610494, 'name': SearchDatastore_Task, 'duration_secs': 0.010529} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.791917] env[63297]: DEBUG oslo_concurrency.lockutils [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.791917] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6c1aa85a-ee37-461b-ad8a-7fbb525e836e/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk. 
{{(pid=63297) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1597.791917] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94623e72-b649-4d12-a91d-793c43cb66d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.799664] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1597.799664] env[63297]: value = "task-1697932" [ 1597.799664] env[63297]: _type = "Task" [ 1597.799664] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.813271] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697932, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.946647] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697931, 'name': ReconfigVM_Task, 'duration_secs': 0.629092} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.947193] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Reconfigured VM instance instance-0000005b to attach disk [datastore1] cc644ecc-7340-421c-b966-19145eb82949/cc644ecc-7340-421c-b966-19145eb82949.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1597.948150] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8bb15e1a-57ad-4f44-b127-5b8a803677b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.955738] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Waiting for the task: (returnval){ [ 1597.955738] env[63297]: value = "task-1697933" [ 1597.955738] env[63297]: _type = "Task" [ 1597.955738] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.965944] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697933, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.996036] env[63297]: DEBUG nova.compute.manager [req-f5305589-66ba-4751-984e-170cb4801f52 req-2c98a5ad-7650-4134-b78f-d714a481cc18 service nova] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Received event network-vif-plugged-6b9e7381-f512-4fe3-9eb0-f334dbb61211 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1597.996036] env[63297]: DEBUG oslo_concurrency.lockutils [req-f5305589-66ba-4751-984e-170cb4801f52 req-2c98a5ad-7650-4134-b78f-d714a481cc18 service nova] Acquiring lock "f9ad9854-2f5b-4edd-9636-8d36d0a89e89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.996276] env[63297]: DEBUG oslo_concurrency.lockutils [req-f5305589-66ba-4751-984e-170cb4801f52 req-2c98a5ad-7650-4134-b78f-d714a481cc18 service nova] Lock "f9ad9854-2f5b-4edd-9636-8d36d0a89e89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.996499] env[63297]: DEBUG oslo_concurrency.lockutils [req-f5305589-66ba-4751-984e-170cb4801f52 req-2c98a5ad-7650-4134-b78f-d714a481cc18 service nova] Lock "f9ad9854-2f5b-4edd-9636-8d36d0a89e89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.999360] env[63297]: DEBUG nova.compute.manager [req-f5305589-66ba-4751-984e-170cb4801f52 req-2c98a5ad-7650-4134-b78f-d714a481cc18 service nova] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] No waiting events found dispatching network-vif-plugged-6b9e7381-f512-4fe3-9eb0-f334dbb61211 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1597.999360] env[63297]: WARNING nova.compute.manager [req-f5305589-66ba-4751-984e-170cb4801f52 req-2c98a5ad-7650-4134-b78f-d714a481cc18 service nova] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Received unexpected event network-vif-plugged-6b9e7381-f512-4fe3-9eb0-f334dbb61211 for instance with vm_state building and task_state spawning. 
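The CopyVirtualDisk_Task and ReconfigVM_Task entries above follow the usual vCenter task lifecycle: the driver submits the task, then repeatedly polls it, logging "progress is N%" until the task reports success or failure. A minimal sketch of that polling pattern (illustrative only, not the oslo.vmware implementation; get_task_info is a hypothetical callable returning an object with state, progress, result and error attributes):

    import time

    POLL_INTERVAL = 0.5  # seconds between polls (assumed value)

    def wait_for_task(task_ref, get_task_info):
        """Poll a vCenter-style task until it reaches a terminal state."""
        while True:
            info = get_task_info(task_ref)
            if info.state == "running":
                print(f"Task {task_ref}: progress is {info.progress}%")
            elif info.state == "success":
                print(f"Task {task_ref} completed successfully")
                return info.result
            elif info.state == "error":
                raise RuntimeError(f"Task {task_ref} failed: {info.error}")
            time.sleep(POLL_INTERVAL)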
[ 1598.084139] env[63297]: DEBUG nova.network.neutron [-] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.261268] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "refresh_cache-f9ad9854-2f5b-4edd-9636-8d36d0a89e89" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.261564] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "refresh_cache-f9ad9854-2f5b-4edd-9636-8d36d0a89e89" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.261564] env[63297]: DEBUG nova.network.neutron [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1598.310450] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697932, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468405} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.311365] env[63297]: INFO nova.virt.vmwareapi.ds_util [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6c1aa85a-ee37-461b-ad8a-7fbb525e836e/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk. 
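The "Acquiring lock" / "Acquired lock" / "Releasing lock" messages, including the per-instance refresh_cache-<uuid> locks above, come from oslo.concurrency's lockutils, which serializes callers on a shared lock name. A rough sketch of how such named locks are typically used (assuming oslo.concurrency is installed; the function names and bodies here are placeholders, not Nova code):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs mutually exclusively with every other caller that
        # synchronizes on the 'compute_resources' lock name.
        pass

    def refresh_network_cache(instance_uuid):
        # The cache-refresh entries above take an explicit per-instance lock.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache here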
[ 1598.314998] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63089c7a-4345-4993-a19e-646996bdf8bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.343766] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 6c1aa85a-ee37-461b-ad8a-7fbb525e836e/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1598.348026] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a080d9e-9ae3-423b-aa14-a2adbb5b8135 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.369098] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1598.369098] env[63297]: value = "task-1697934" [ 1598.369098] env[63297]: _type = "Task" [ 1598.369098] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.381401] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697934, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.452394] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8843c98d-d6ab-40ad-9f88-4624884491d4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.463094] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11deec42-2809-41ad-9eb8-3386f5af3622 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.469772] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697933, 'name': Rename_Task, 'duration_secs': 0.202458} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.470553] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1598.470857] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1493ed4d-2e0f-4c4e-b2bd-5e05ae20767f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.499747] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.500209] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.500399] env[63297]: INFO nova.compute.manager [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Attaching volume 86accfd5-39e8-4bfb-a471-18e234f14803 to /dev/sdb [ 1598.504322] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a49382e-beec-44f6-822f-60426e7d6d9c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.507390] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Waiting for the task: (returnval){ [ 1598.507390] env[63297]: value = "task-1697935" [ 1598.507390] env[63297]: _type = "Task" [ 1598.507390] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.517294] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f85e5d-2de0-4c4f-b390-7d2a45ef6d6b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.525178] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697935, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.536368] env[63297]: DEBUG nova.compute.provider_tree [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1598.541119] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92808e75-8cd0-4ca0-a189-6b52d331e864 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.549267] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d8cad0-0948-4e9c-ad2f-502fa2b60c10 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.564013] env[63297]: DEBUG nova.virt.block_device [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Updating existing volume attachment record: 3f396117-0139-4075-b932-66d56b84463f {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1598.587572] env[63297]: INFO nova.compute.manager [-] [instance: 020b06c5-44e2-4f74-a1dc-d7557db3537e] Took 1.43 seconds to deallocate network for instance. [ 1598.801093] env[63297]: DEBUG nova.network.neutron [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1598.883569] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697934, 'name': ReconfigVM_Task, 'duration_secs': 0.300016} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.883569] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 6c1aa85a-ee37-461b-ad8a-7fbb525e836e/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1598.884771] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54414fd1-0792-4a41-843d-6c155a434252 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.922087] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1c86b70-7413-444b-806e-838423b186e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.940899] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1598.940899] env[63297]: value = "task-1697937" [ 1598.940899] env[63297]: _type = "Task" [ 1598.940899] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.951488] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697937, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.016160] env[63297]: DEBUG nova.network.neutron [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Updating instance_info_cache with network_info: [{"id": "6b9e7381-f512-4fe3-9eb0-f334dbb61211", "address": "fa:16:3e:c4:b1:5f", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b9e7381-f5", "ovs_interfaceid": "6b9e7381-f512-4fe3-9eb0-f334dbb61211", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.021629] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697935, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.069389] env[63297]: ERROR nova.scheduler.client.report [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] [req-9280a461-00e8-4dd9-a939-1885d242e674] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9280a461-00e8-4dd9-a939-1885d242e674"}]} [ 1599.094635] env[63297]: DEBUG nova.scheduler.client.report [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1599.116917] env[63297]: DEBUG nova.scheduler.client.report [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1599.117600] env[63297]: DEBUG nova.compute.provider_tree [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1599.136329] env[63297]: DEBUG nova.scheduler.client.report [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1599.160023] env[63297]: DEBUG nova.scheduler.client.report [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1599.452240] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697937, 'name': ReconfigVM_Task, 'duration_secs': 0.165876} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.453840] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1599.455433] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2940e34-a8d5-4bfc-9d35-437471fbb2ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.458408] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97ee43b3-86ae-4487-b146-30cd7d89fc9c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.464917] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d727be7e-fcd7-4d84-857a-dac91285e1f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.468796] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1599.468796] env[63297]: value = "task-1697938" [ 1599.468796] env[63297]: _type = "Task" [ 1599.468796] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.477729] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697938, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.503876] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1927cab-20e6-45ca-b5e2-231996b39e72 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.516821] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ced47e6-8e4a-4611-8a97-ee3bd329357a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.523109] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "refresh_cache-f9ad9854-2f5b-4edd-9636-8d36d0a89e89" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.523429] env[63297]: DEBUG nova.compute.manager [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Instance network_info: |[{"id": "6b9e7381-f512-4fe3-9eb0-f334dbb61211", "address": "fa:16:3e:c4:b1:5f", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b9e7381-f5", "ovs_interfaceid": "6b9e7381-f512-4fe3-9eb0-f334dbb61211", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1599.523879] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:b1:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b9e7381-f512-4fe3-9eb0-f334dbb61211', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1599.532541] env[63297]: DEBUG oslo.service.loopingcall [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1599.544961] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1599.545486] env[63297]: DEBUG nova.compute.provider_tree [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1599.546855] env[63297]: DEBUG oslo_vmware.api [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697935, 'name': PowerOnVM_Task, 'duration_secs': 0.587268} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.547270] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb4a2f76-edd5-4ab7-a0aa-e129f451ce07 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.561696] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1599.562253] env[63297]: INFO nova.compute.manager [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Took 9.11 seconds to spawn the instance on the hypervisor. [ 1599.562253] env[63297]: DEBUG nova.compute.manager [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1599.563539] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e7a51b-b982-4671-8ca5-38fae1eade20 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.571940] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1599.571940] env[63297]: value = "task-1697939" [ 1599.571940] env[63297]: _type = "Task" [ 1599.571940] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.584757] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697939, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.620447] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4e0f7435-f372-45ac-87ec-4adeff6b553e tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "020b06c5-44e2-4f74-a1dc-d7557db3537e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.531s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.981090] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697938, 'name': PowerOnVM_Task} progress is 90%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.030689] env[63297]: DEBUG nova.compute.manager [req-af43067b-37f6-491b-8ceb-3ead84c1a5c4 req-9a8a4572-8801-4e26-aac6-a475da956ef9 service nova] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Received event network-changed-6b9e7381-f512-4fe3-9eb0-f334dbb61211 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1600.031682] env[63297]: DEBUG nova.compute.manager [req-af43067b-37f6-491b-8ceb-3ead84c1a5c4 req-9a8a4572-8801-4e26-aac6-a475da956ef9 service nova] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Refreshing instance network info cache due to event network-changed-6b9e7381-f512-4fe3-9eb0-f334dbb61211. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1600.031682] env[63297]: DEBUG oslo_concurrency.lockutils [req-af43067b-37f6-491b-8ceb-3ead84c1a5c4 req-9a8a4572-8801-4e26-aac6-a475da956ef9 service nova] Acquiring lock "refresh_cache-f9ad9854-2f5b-4edd-9636-8d36d0a89e89" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.031682] env[63297]: DEBUG oslo_concurrency.lockutils [req-af43067b-37f6-491b-8ceb-3ead84c1a5c4 req-9a8a4572-8801-4e26-aac6-a475da956ef9 service nova] Acquired lock "refresh_cache-f9ad9854-2f5b-4edd-9636-8d36d0a89e89" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.031682] env[63297]: DEBUG nova.network.neutron [req-af43067b-37f6-491b-8ceb-3ead84c1a5c4 req-9a8a4572-8801-4e26-aac6-a475da956ef9 service nova] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Refreshing network info cache for port 6b9e7381-f512-4fe3-9eb0-f334dbb61211 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1600.091688] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697939, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.091688] env[63297]: INFO nova.compute.manager [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Took 30.09 seconds to build instance. 
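The ERROR above is the expected Placement concurrency behaviour rather than a failure: the inventory PUT carried a stale resource provider generation, Placement answered 409 with code placement.concurrent_update, and the report client refreshed inventories, aggregates and traits before retrying (the generation then moved from 123 to 124). A condensed sketch of that read-modify-retry loop against the Placement HTTP API (endpoint, token and microversion are assumptions, not taken from this log):

    import requests

    PLACEMENT = "http://placement.example/placement"  # hypothetical endpoint
    HEADERS = {"X-Auth-Token": "TOKEN",
               "OpenStack-API-Version": "placement 1.26"}

    def set_inventory(rp_uuid, inventories, retries=3):
        url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
        for _ in range(retries):
            # Re-read the provider to pick up its current generation.
            current = requests.get(url, headers=HEADERS).json()
            body = {
                "resource_provider_generation":
                    current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation first; loop, refresh and try again.
        raise RuntimeError("gave up after repeated generation conflicts")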
[ 1600.101248] env[63297]: DEBUG nova.scheduler.client.report [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 123 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1600.101751] env[63297]: DEBUG nova.compute.provider_tree [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 123 to 124 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1600.102068] env[63297]: DEBUG nova.compute.provider_tree [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1600.481362] env[63297]: DEBUG oslo_vmware.api [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697938, 'name': PowerOnVM_Task, 'duration_secs': 0.620302} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.481630] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1600.484386] env[63297]: DEBUG nova.compute.manager [None req-540b2365-e307-4455-a387-3fb09ecdec41 tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1600.485374] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4bdae0-99c3-45ff-a4b1-6eba539412fa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.591230] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697939, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.592827] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4b5052d-c928-4931-9268-539ded76492a tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lock "cc644ecc-7340-421c-b966-19145eb82949" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.607s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.608581] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.022s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.611691] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.692s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.612384] env[63297]: DEBUG nova.objects.instance [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lazy-loading 'resources' on Instance uuid 310cf8d4-613a-4c35-b118-7d79138e4799 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1600.638965] env[63297]: INFO nova.scheduler.client.report [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Deleted allocations for instance a0f4160e-cfb4-4d1d-bbee-6df44eb363fb [ 1600.818063] env[63297]: DEBUG nova.network.neutron [req-af43067b-37f6-491b-8ceb-3ead84c1a5c4 req-9a8a4572-8801-4e26-aac6-a475da956ef9 service nova] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Updated VIF entry in instance network info cache for port 6b9e7381-f512-4fe3-9eb0-f334dbb61211. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1600.818574] env[63297]: DEBUG nova.network.neutron [req-af43067b-37f6-491b-8ceb-3ead84c1a5c4 req-9a8a4572-8801-4e26-aac6-a475da956ef9 service nova] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Updating instance_info_cache with network_info: [{"id": "6b9e7381-f512-4fe3-9eb0-f334dbb61211", "address": "fa:16:3e:c4:b1:5f", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b9e7381-f5", "ovs_interfaceid": "6b9e7381-f512-4fe3-9eb0-f334dbb61211", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1600.971612] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquiring lock "cc644ecc-7340-421c-b966-19145eb82949" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.971952] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lock "cc644ecc-7340-421c-b966-19145eb82949" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.972199] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquiring lock "cc644ecc-7340-421c-b966-19145eb82949-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.972390] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lock "cc644ecc-7340-421c-b966-19145eb82949-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.972560] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 
tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lock "cc644ecc-7340-421c-b966-19145eb82949-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.975056] env[63297]: INFO nova.compute.manager [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Terminating instance [ 1600.977034] env[63297]: DEBUG nova.compute.manager [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1600.977202] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1600.978357] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214b0c94-4541-42aa-bf77-6cc526b7a616 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.986678] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1600.986945] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b675348-5976-47a8-94cc-3ab9339cbcb0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.994062] env[63297]: DEBUG oslo_vmware.api [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Waiting for the task: (returnval){ [ 1600.994062] env[63297]: value = "task-1697941" [ 1600.994062] env[63297]: _type = "Task" [ 1600.994062] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.007543] env[63297]: DEBUG oslo_vmware.api [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697941, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.084515] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697939, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.149916] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b4c907a2-9ce2-42ef-87d1-e2196c252891 tempest-ServerPasswordTestJSON-1351298218 tempest-ServerPasswordTestJSON-1351298218-project-member] Lock "a0f4160e-cfb4-4d1d-bbee-6df44eb363fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.718s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.310109] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "fd178a30-b5f0-4019-a05f-f1928e1d122a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.310404] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "fd178a30-b5f0-4019-a05f-f1928e1d122a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.321193] env[63297]: DEBUG oslo_concurrency.lockutils [req-af43067b-37f6-491b-8ceb-3ead84c1a5c4 req-9a8a4572-8801-4e26-aac6-a475da956ef9 service nova] Releasing lock "refresh_cache-f9ad9854-2f5b-4edd-9636-8d36d0a89e89" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.505824] env[63297]: DEBUG oslo_vmware.api [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697941, 'name': PowerOffVM_Task, 'duration_secs': 0.418084} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.506163] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1601.506342] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1601.506729] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f2d3d6f-0534-485b-9451-2397961995cc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.509898] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8fa6d7-afdd-471f-afe8-41b3507e831e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.517613] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f576b326-9768-4f6b-9454-737f83041950 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.550541] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07cb9a23-8717-4cb1-8750-9e824892e000 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.558473] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bf8b4f-3863-4445-802d-7694baa88a01 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.573977] env[63297]: DEBUG nova.compute.provider_tree [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1601.588022] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697939, 'name': CreateVM_Task, 'duration_secs': 1.628873} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.588022] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1601.588022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1601.588022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1601.588022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1601.588022] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4953ffc0-35c6-4ae6-8bfc-7868103f86e2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.597424] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1601.597424] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1601.597424] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Deleting the datastore file [datastore1] cc644ecc-7340-421c-b966-19145eb82949 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1601.599600] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c70bfaf-15c2-459d-b748-b9be7f42723f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.600252] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1601.600252] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ada00a-dd92-3f90-2403-8350894cc167" [ 1601.600252] env[63297]: _type = "Task" [ 1601.600252] env[63297]: } to 
complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.605332] env[63297]: DEBUG oslo_vmware.api [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Waiting for the task: (returnval){ [ 1601.605332] env[63297]: value = "task-1697943" [ 1601.605332] env[63297]: _type = "Task" [ 1601.605332] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.612011] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ada00a-dd92-3f90-2403-8350894cc167, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.616623] env[63297]: DEBUG oslo_vmware.api [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697943, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.813194] env[63297]: DEBUG nova.compute.manager [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1602.058150] env[63297]: DEBUG nova.compute.manager [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Received event network-changed-8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1602.058360] env[63297]: DEBUG nova.compute.manager [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Refreshing instance network info cache due to event network-changed-8c7f6ca3-5c56-4270-88a0-28ddd06a0256. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1602.058569] env[63297]: DEBUG oslo_concurrency.lockutils [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] Acquiring lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.058832] env[63297]: DEBUG oslo_concurrency.lockutils [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] Acquired lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.059016] env[63297]: DEBUG nova.network.neutron [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Refreshing network info cache for port 8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1602.080555] env[63297]: DEBUG nova.scheduler.client.report [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1602.123559] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ada00a-dd92-3f90-2403-8350894cc167, 'name': SearchDatastore_Task, 'duration_secs': 0.020588} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.124067] env[63297]: DEBUG oslo_vmware.api [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Task: {'id': task-1697943, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.442233} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.124473] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.124877] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1602.125278] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.125565] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.125874] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1602.126282] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1602.126614] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1602.127023] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1602.128021] env[63297]: INFO nova.compute.manager [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] [instance: cc644ecc-7340-421c-b966-19145eb82949] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1602.128021] env[63297]: DEBUG oslo.service.loopingcall [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1602.128021] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-516c50a4-1005-469b-8df4-ee01c306988a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.130663] env[63297]: DEBUG nova.compute.manager [-] [instance: cc644ecc-7340-421c-b966-19145eb82949] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1602.130910] env[63297]: DEBUG nova.network.neutron [-] [instance: cc644ecc-7340-421c-b966-19145eb82949] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1602.145044] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1602.145044] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1602.145044] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-749f67ba-4992-40a1-9a8c-90fe0c33e0ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.152953] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1602.152953] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e96de3-85dc-c88a-0423-8d92f14751bc" [ 1602.152953] env[63297]: _type = "Task" [ 1602.152953] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.160199] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e96de3-85dc-c88a-0423-8d92f14751bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.177142] env[63297]: DEBUG oslo_vmware.rw_handles [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a5093a-174e-a5ae-192c-4db824163944/disk-0.vmdk. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1602.177142] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e68fbac-72af-4158-b00c-e130ec0b1180 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.186820] env[63297]: DEBUG oslo_vmware.rw_handles [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a5093a-174e-a5ae-192c-4db824163944/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1602.186988] env[63297]: ERROR oslo_vmware.rw_handles [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a5093a-174e-a5ae-192c-4db824163944/disk-0.vmdk due to incomplete transfer. [ 1602.188322] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a0199e0a-9823-49a5-b98a-5db8ad41424a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.195253] env[63297]: DEBUG oslo_vmware.rw_handles [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a5093a-174e-a5ae-192c-4db824163944/disk-0.vmdk. {{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1602.195253] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Uploaded image 6e1de458-527b-4161-a12d-2d2d0f5efddb to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1602.196812] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1602.197817] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7a04acc7-80ea-4666-8090-ad67b523cc7f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.204228] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1602.204228] env[63297]: value = "task-1697944" [ 1602.204228] env[63297]: _type = "Task" [ 1602.204228] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.214541] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697944, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.345206] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.589025] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.975s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.589025] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.415s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.591763] env[63297]: INFO nova.compute.claims [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1602.614959] env[63297]: INFO nova.scheduler.client.report [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleted allocations for instance 310cf8d4-613a-4c35-b118-7d79138e4799 [ 1602.664322] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e96de3-85dc-c88a-0423-8d92f14751bc, 'name': SearchDatastore_Task, 'duration_secs': 0.009585} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.665488] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9887ddef-5407-4676-9732-0066b334321c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.672394] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1602.672394] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52717143-c612-d1f7-d362-a0b68233dc3c" [ 1602.672394] env[63297]: _type = "Task" [ 1602.672394] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.682861] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52717143-c612-d1f7-d362-a0b68233dc3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.715912] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697944, 'name': Destroy_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.927475] env[63297]: DEBUG nova.network.neutron [-] [instance: cc644ecc-7340-421c-b966-19145eb82949] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.969224] env[63297]: DEBUG nova.network.neutron [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updated VIF entry in instance network info cache for port 8c7f6ca3-5c56-4270-88a0-28ddd06a0256. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1602.969657] env[63297]: DEBUG nova.network.neutron [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updating instance_info_cache with network_info: [{"id": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "address": "fa:16:3e:b9:69:22", "network": {"id": "b20022f3-067f-42e6-9029-1f69f6657c27", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-894113652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e843a2460c21477ca894b3e2846ec98d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7f6ca3-5c", "ovs_interfaceid": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.123223] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Volume attach. 
Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1603.123476] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353972', 'volume_id': '86accfd5-39e8-4bfb-a471-18e234f14803', 'name': 'volume-86accfd5-39e8-4bfb-a471-18e234f14803', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b7b9f1b-d277-4219-92fb-e35a8b867e77', 'attached_at': '', 'detached_at': '', 'volume_id': '86accfd5-39e8-4bfb-a471-18e234f14803', 'serial': '86accfd5-39e8-4bfb-a471-18e234f14803'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1603.127705] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2443804-5a94-4ac9-91e1-34cc7a072a9c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.131447] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09bd68f5-d02a-4002-89f9-1d982438cea4 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "310cf8d4-613a-4c35-b118-7d79138e4799" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.405s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.155911] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631a9aaf-ffd6-4222-8d61-f6f1d801329a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.181312] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] volume-86accfd5-39e8-4bfb-a471-18e234f14803/volume-86accfd5-39e8-4bfb-a471-18e234f14803.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1603.187031] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf5dcacc-f115-43f0-a031-5fbc551bb186 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.211020] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52717143-c612-d1f7-d362-a0b68233dc3c, 'name': SearchDatastore_Task, 'duration_secs': 0.020922} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.212807] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.213292] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] f9ad9854-2f5b-4edd-9636-8d36d0a89e89/f9ad9854-2f5b-4edd-9636-8d36d0a89e89.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1603.213810] env[63297]: DEBUG oslo_vmware.api [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1603.213810] env[63297]: value = "task-1697945" [ 1603.213810] env[63297]: _type = "Task" [ 1603.213810] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.214196] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c5ded4d-c9bb-4938-8b51-2b11e1dd1ab6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.225361] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697944, 'name': Destroy_Task, 'duration_secs': 0.770458} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.225930] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Destroyed the VM [ 1603.226187] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1603.226447] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1138f994-b618-48ef-9901-2f6dfdfe4065 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.233021] env[63297]: DEBUG oslo_vmware.api [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697945, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.233329] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1603.233329] env[63297]: value = "task-1697946" [ 1603.233329] env[63297]: _type = "Task" [ 1603.233329] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.240733] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1603.240733] env[63297]: value = "task-1697947" [ 1603.240733] env[63297]: _type = "Task" [ 1603.240733] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.244791] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697946, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.253071] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697947, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.430637] env[63297]: INFO nova.compute.manager [-] [instance: cc644ecc-7340-421c-b966-19145eb82949] Took 1.30 seconds to deallocate network for instance. [ 1603.474674] env[63297]: DEBUG oslo_concurrency.lockutils [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] Releasing lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.475014] env[63297]: DEBUG nova.compute.manager [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Received event network-changed-8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1603.475221] env[63297]: DEBUG nova.compute.manager [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Refreshing instance network info cache due to event network-changed-8c7f6ca3-5c56-4270-88a0-28ddd06a0256. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1603.475469] env[63297]: DEBUG oslo_concurrency.lockutils [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] Acquiring lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1603.475580] env[63297]: DEBUG oslo_concurrency.lockutils [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] Acquired lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.475745] env[63297]: DEBUG nova.network.neutron [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Refreshing network info cache for port 8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1603.731195] env[63297]: DEBUG oslo_vmware.api [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.747932] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697946, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.761075] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697947, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.937590] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.982279] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1603.982490] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.004202] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-229e9811-b9ba-48e9-be36-6474a59f505d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.023074] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48550ab-5540-4b5e-aa2e-ef86eb69d376 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.062061] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b21445-4a37-493c-bb73-35c0ca9d9b8e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.069674] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2a0793-a209-445f-b66d-d1492cb568ba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.092681] env[63297]: DEBUG nova.compute.provider_tree [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1604.232569] env[63297]: DEBUG oslo_vmware.api [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697945, 'name': ReconfigVM_Task, 'duration_secs': 0.854688} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.232569] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Reconfigured VM instance instance-00000052 to attach disk [datastore1] volume-86accfd5-39e8-4bfb-a471-18e234f14803/volume-86accfd5-39e8-4bfb-a471-18e234f14803.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1604.239046] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05dd432b-e818-45d3-9100-42ca372aa401 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.264641] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697947, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.270064] env[63297]: DEBUG nova.network.neutron [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updated VIF entry in instance network info cache for port 8c7f6ca3-5c56-4270-88a0-28ddd06a0256. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1604.270419] env[63297]: DEBUG nova.network.neutron [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updating instance_info_cache with network_info: [{"id": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "address": "fa:16:3e:b9:69:22", "network": {"id": "b20022f3-067f-42e6-9029-1f69f6657c27", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-894113652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e843a2460c21477ca894b3e2846ec98d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7f6ca3-5c", "ovs_interfaceid": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.271841] env[63297]: DEBUG oslo_vmware.api [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1604.271841] env[63297]: value = "task-1697948" [ 1604.271841] env[63297]: _type = "Task" [ 
1604.271841] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.272069] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697946, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.717746} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.272361] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] f9ad9854-2f5b-4edd-9636-8d36d0a89e89/f9ad9854-2f5b-4edd-9636-8d36d0a89e89.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1604.272556] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1604.275810] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-edfa270e-6099-48ed-980c-dbd233a1461b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.283816] env[63297]: DEBUG oslo_vmware.api [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697948, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.285554] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1604.285554] env[63297]: value = "task-1697949" [ 1604.285554] env[63297]: _type = "Task" [ 1604.285554] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.295496] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697949, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.492533] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.492841] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1604.598019] env[63297]: DEBUG nova.scheduler.client.report [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1604.603109] env[63297]: DEBUG nova.compute.manager [req-0a3172b5-d178-4101-a091-22b0d2366c38 req-a1c15699-3466-4ac4-a1ad-d3ef9826dda1 service nova] [instance: cc644ecc-7340-421c-b966-19145eb82949] Received event network-vif-deleted-9498ba84-eb30-4513-a275-7c6726afd80b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1604.604402] env[63297]: DEBUG nova.compute.manager [req-0a3172b5-d178-4101-a091-22b0d2366c38 req-a1c15699-3466-4ac4-a1ad-d3ef9826dda1 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Received event network-changed-8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1604.604402] env[63297]: DEBUG nova.compute.manager [req-0a3172b5-d178-4101-a091-22b0d2366c38 req-a1c15699-3466-4ac4-a1ad-d3ef9826dda1 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Refreshing instance network info cache due to event network-changed-8c7f6ca3-5c56-4270-88a0-28ddd06a0256. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1604.604402] env[63297]: DEBUG oslo_concurrency.lockutils [req-0a3172b5-d178-4101-a091-22b0d2366c38 req-a1c15699-3466-4ac4-a1ad-d3ef9826dda1 service nova] Acquiring lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.773790] env[63297]: DEBUG oslo_vmware.api [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697947, 'name': RemoveSnapshot_Task, 'duration_secs': 1.230115} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.773790] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1604.774702] env[63297]: INFO nova.compute.manager [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Took 16.16 seconds to snapshot the instance on the hypervisor. [ 1604.779697] env[63297]: DEBUG oslo_concurrency.lockutils [req-6f049980-b955-44fe-8634-ccf1d0d99ec3 req-0d18a1ee-769b-4cc8-8d52-d01b058ba5f3 service nova] Releasing lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1604.784696] env[63297]: DEBUG oslo_concurrency.lockutils [req-0a3172b5-d178-4101-a091-22b0d2366c38 req-a1c15699-3466-4ac4-a1ad-d3ef9826dda1 service nova] Acquired lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1604.785069] env[63297]: DEBUG nova.network.neutron [req-0a3172b5-d178-4101-a091-22b0d2366c38 req-a1c15699-3466-4ac4-a1ad-d3ef9826dda1 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Refreshing network info cache for port 8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1604.797090] env[63297]: DEBUG oslo_vmware.api [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697948, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.802282] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697949, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066361} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.802722] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1604.804726] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b606fc61-ff6f-40e4-a1b5-215449956c12 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.832016] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] f9ad9854-2f5b-4edd-9636-8d36d0a89e89/f9ad9854-2f5b-4edd-9636-8d36d0a89e89.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1604.832016] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6f1737b-3268-4680-8a2d-9789981948f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.849657] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1604.849657] env[63297]: value = "task-1697950" [ 1604.849657] env[63297]: _type = "Task" [ 1604.849657] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.858459] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697950, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.908033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "b261c90f-642d-42b7-8b79-d87eeaf0537a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.908033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "b261c90f-642d-42b7-8b79-d87eeaf0537a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.908190] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "b261c90f-642d-42b7-8b79-d87eeaf0537a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.908338] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "b261c90f-642d-42b7-8b79-d87eeaf0537a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.908507] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "b261c90f-642d-42b7-8b79-d87eeaf0537a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.910633] env[63297]: INFO nova.compute.manager [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Terminating instance [ 1604.913232] env[63297]: DEBUG nova.compute.manager [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1604.913232] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1604.913973] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4448c7fa-cdf6-4638-8bba-69a20615f67d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.922963] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1604.922963] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf2580d0-57ed-4507-8015-30b2cab1b073 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.927900] env[63297]: DEBUG oslo_vmware.api [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1604.927900] env[63297]: value = "task-1697951" [ 1604.927900] env[63297]: _type = "Task" [ 1604.927900] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.935481] env[63297]: DEBUG oslo_vmware.api [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697951, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.102031] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.104336] env[63297]: DEBUG nova.compute.manager [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1605.105519] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.387s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.107340] env[63297]: INFO nova.compute.claims [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1605.158539] env[63297]: DEBUG oslo_concurrency.lockutils [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "44f4776e-d4a1-40ad-a03b-bb03582b95bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.158809] env[63297]: DEBUG oslo_concurrency.lockutils [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "44f4776e-d4a1-40ad-a03b-bb03582b95bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.159030] env[63297]: DEBUG oslo_concurrency.lockutils [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "44f4776e-d4a1-40ad-a03b-bb03582b95bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.159214] env[63297]: DEBUG oslo_concurrency.lockutils [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "44f4776e-d4a1-40ad-a03b-bb03582b95bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.159383] env[63297]: DEBUG oslo_concurrency.lockutils [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "44f4776e-d4a1-40ad-a03b-bb03582b95bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.163934] env[63297]: INFO nova.compute.manager [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Terminating instance [ 1605.167741] env[63297]: DEBUG nova.compute.manager [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] 
Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1605.167923] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1605.168950] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbc64b2-9158-4c96-9be3-3a1b634c0625 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.178911] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1605.178911] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66f75d60-69e3-464b-87af-44ceff94d874 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.183573] env[63297]: DEBUG oslo_vmware.api [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1605.183573] env[63297]: value = "task-1697952" [ 1605.183573] env[63297]: _type = "Task" [ 1605.183573] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.192734] env[63297]: DEBUG oslo_vmware.api [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697952, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.275188] env[63297]: DEBUG nova.compute.manager [req-9dbcb9c1-c2c2-4563-a533-6daab9210ae8 req-545cb72d-da81-48db-b8cc-502cba8ebf4b service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Received event network-changed-8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1605.275392] env[63297]: DEBUG nova.compute.manager [req-9dbcb9c1-c2c2-4563-a533-6daab9210ae8 req-545cb72d-da81-48db-b8cc-502cba8ebf4b service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Refreshing instance network info cache due to event network-changed-8c7f6ca3-5c56-4270-88a0-28ddd06a0256. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1605.275574] env[63297]: DEBUG oslo_concurrency.lockutils [req-9dbcb9c1-c2c2-4563-a533-6daab9210ae8 req-545cb72d-da81-48db-b8cc-502cba8ebf4b service nova] Acquiring lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1605.290696] env[63297]: DEBUG oslo_vmware.api [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697948, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.325957] env[63297]: DEBUG nova.compute.manager [None req-54e6b182-8022-403d-8508-3261c9ff4038 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Found 1 images (rotation: 2) {{(pid=63297) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1605.367999] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.438937] env[63297]: DEBUG oslo_vmware.api [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697951, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.612508] env[63297]: DEBUG nova.compute.utils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1605.614242] env[63297]: DEBUG nova.compute.manager [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1605.614466] env[63297]: DEBUG nova.network.neutron [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1605.684629] env[63297]: DEBUG nova.network.neutron [req-0a3172b5-d178-4101-a091-22b0d2366c38 req-a1c15699-3466-4ac4-a1ad-d3ef9826dda1 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updated VIF entry in instance network info cache for port 8c7f6ca3-5c56-4270-88a0-28ddd06a0256. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1605.684629] env[63297]: DEBUG nova.network.neutron [req-0a3172b5-d178-4101-a091-22b0d2366c38 req-a1c15699-3466-4ac4-a1ad-d3ef9826dda1 service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updating instance_info_cache with network_info: [{"id": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "address": "fa:16:3e:b9:69:22", "network": {"id": "b20022f3-067f-42e6-9029-1f69f6657c27", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-894113652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e843a2460c21477ca894b3e2846ec98d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7f6ca3-5c", "ovs_interfaceid": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.696868] env[63297]: DEBUG oslo_vmware.api [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697952, 'name': PowerOffVM_Task, 'duration_secs': 0.218558} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.698072] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1605.698274] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1605.698536] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4434321-4857-405b-aa14-e5d3a3e0a66e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.711756] env[63297]: DEBUG nova.policy [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f80bce1510594b8a95537f814f68b2bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45448893e7ee4b8d896d1bb3f3a9ecf1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1605.720593] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.720734] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.780786] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1605.781073] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1605.781403] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleting the datastore file [datastore1] 44f4776e-d4a1-40ad-a03b-bb03582b95bd {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1605.786707] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-458a896a-20d2-4f83-ad4c-355d5607e3f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.794974] env[63297]: DEBUG oslo_vmware.api [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697948, 'name': ReconfigVM_Task, 'duration_secs': 1.167233} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.798503] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353972', 'volume_id': '86accfd5-39e8-4bfb-a471-18e234f14803', 'name': 'volume-86accfd5-39e8-4bfb-a471-18e234f14803', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b7b9f1b-d277-4219-92fb-e35a8b867e77', 'attached_at': '', 'detached_at': '', 'volume_id': '86accfd5-39e8-4bfb-a471-18e234f14803', 'serial': '86accfd5-39e8-4bfb-a471-18e234f14803'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1605.804019] env[63297]: DEBUG oslo_vmware.api [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1605.804019] env[63297]: value = "task-1697954" [ 1605.804019] env[63297]: _type = "Task" [ 1605.804019] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.810736] env[63297]: DEBUG oslo_vmware.api [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697954, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.862551] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697950, 'name': ReconfigVM_Task, 'duration_secs': 0.977113} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.862830] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Reconfigured VM instance instance-0000005c to attach disk [datastore1] f9ad9854-2f5b-4edd-9636-8d36d0a89e89/f9ad9854-2f5b-4edd-9636-8d36d0a89e89.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1605.863634] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6d0c757-a6a7-4c91-804e-4706a62319f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.872110] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1605.872110] env[63297]: value = "task-1697955" [ 1605.872110] env[63297]: _type = "Task" [ 1605.872110] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.878609] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697955, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.939820] env[63297]: DEBUG oslo_vmware.api [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697951, 'name': PowerOffVM_Task, 'duration_secs': 0.839957} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.940131] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1605.940307] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1605.940564] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98380fb2-180d-4f4c-a3fc-f129536851f0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.014779] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1606.015544] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1606.016059] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Deleting the datastore file [datastore1] b261c90f-642d-42b7-8b79-d87eeaf0537a {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1606.017150] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dceef855-fb13-44db-a4e2-4372c1577252 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.025359] env[63297]: DEBUG oslo_vmware.api [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for the task: (returnval){ [ 1606.025359] env[63297]: value = "task-1697957" [ 1606.025359] env[63297]: _type = "Task" [ 1606.025359] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.035861] env[63297]: DEBUG oslo_vmware.api [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697957, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.039970] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "617544f0-fa53-415d-9f00-c8143e8e25b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.040326] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "617544f0-fa53-415d-9f00-c8143e8e25b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.120143] env[63297]: DEBUG nova.compute.manager [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1606.134675] env[63297]: DEBUG nova.network.neutron [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Successfully created port: e0aae0a1-c525-4233-b876-799c11006f75 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1606.193610] env[63297]: DEBUG oslo_concurrency.lockutils [req-0a3172b5-d178-4101-a091-22b0d2366c38 req-a1c15699-3466-4ac4-a1ad-d3ef9826dda1 service nova] Releasing lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.193610] env[63297]: DEBUG oslo_concurrency.lockutils [req-9dbcb9c1-c2c2-4563-a533-6daab9210ae8 req-545cb72d-da81-48db-b8cc-502cba8ebf4b service nova] Acquired lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.193610] env[63297]: DEBUG nova.network.neutron [req-9dbcb9c1-c2c2-4563-a533-6daab9210ae8 req-545cb72d-da81-48db-b8cc-502cba8ebf4b service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Refreshing network info cache for port 8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1606.223635] env[63297]: DEBUG nova.compute.manager [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1606.316563] env[63297]: DEBUG oslo_vmware.api [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1697954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191907} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.319640] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1606.319863] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1606.320694] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1606.320694] env[63297]: INFO nova.compute.manager [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1606.320694] env[63297]: DEBUG oslo.service.loopingcall [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.320933] env[63297]: DEBUG nova.compute.manager [-] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1606.321041] env[63297]: DEBUG nova.network.neutron [-] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1606.384296] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697955, 'name': Rename_Task, 'duration_secs': 0.140146} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.384753] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1606.385063] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1dd08947-b209-47a3-9c8f-a794859d5e3d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.394209] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1606.394209] env[63297]: value = "task-1697958" [ 1606.394209] env[63297]: _type = "Task" [ 1606.394209] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.407843] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697958, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.515449] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988bc913-d71b-45e0-8c88-d09f3e3f3036 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.526697] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b376f3b-eac8-41f2-9d6a-bf41ba472bd7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.568066] env[63297]: DEBUG oslo_vmware.api [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697957, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.568888] env[63297]: DEBUG nova.compute.manager [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1606.572666] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3f548d-a512-43af-97c3-ff283218bbf6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.580467] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabdca56-8606-44f1-b8fa-ce6cd5988759 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.597755] env[63297]: DEBUG nova.compute.provider_tree [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1606.675551] env[63297]: DEBUG nova.compute.manager [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1606.676672] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c526fd40-87c6-4c2b-b729-5bb86db29944 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.756866] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.805477] env[63297]: DEBUG nova.compute.manager [req-8cd208bb-c9e6-4ce8-8481-82eb3699c5c2 req-bcb3128b-6ee3-4297-be1f-daa03addbd1c service nova] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Received event network-vif-deleted-5da611f1-76bd-4f99-a624-d504e942a954 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1606.805783] env[63297]: INFO nova.compute.manager [req-8cd208bb-c9e6-4ce8-8481-82eb3699c5c2 req-bcb3128b-6ee3-4297-be1f-daa03addbd1c service nova] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Neutron deleted interface 5da611f1-76bd-4f99-a624-d504e942a954; detaching it from the instance and deleting it from the info cache [ 1606.805848] env[63297]: DEBUG nova.network.neutron [req-8cd208bb-c9e6-4ce8-8481-82eb3699c5c2 req-bcb3128b-6ee3-4297-be1f-daa03addbd1c service nova] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1606.847097] env[63297]: DEBUG nova.objects.instance [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e 
tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lazy-loading 'flavor' on Instance uuid 0b7b9f1b-d277-4219-92fb-e35a8b867e77 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1606.905676] env[63297]: DEBUG oslo_concurrency.lockutils [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquiring lock "6c1aa85a-ee37-461b-ad8a-7fbb525e836e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.905908] env[63297]: DEBUG oslo_concurrency.lockutils [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Lock "6c1aa85a-ee37-461b-ad8a-7fbb525e836e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.906133] env[63297]: DEBUG oslo_concurrency.lockutils [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquiring lock "6c1aa85a-ee37-461b-ad8a-7fbb525e836e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.906314] env[63297]: DEBUG oslo_concurrency.lockutils [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Lock "6c1aa85a-ee37-461b-ad8a-7fbb525e836e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.906477] env[63297]: DEBUG oslo_concurrency.lockutils [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Lock "6c1aa85a-ee37-461b-ad8a-7fbb525e836e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1606.907932] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697958, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.908365] env[63297]: INFO nova.compute.manager [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Terminating instance [ 1606.910411] env[63297]: DEBUG nova.compute.manager [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1606.910599] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1606.911411] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d958db8a-459f-47fa-9a3e-dd771a1528eb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.920774] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1606.921272] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f282c5b9-89cb-4a59-b861-ae1148313b32 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.926906] env[63297]: DEBUG oslo_vmware.api [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1606.926906] env[63297]: value = "task-1697959" [ 1606.926906] env[63297]: _type = "Task" [ 1606.926906] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.936462] env[63297]: DEBUG oslo_vmware.api [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697959, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.951335] env[63297]: DEBUG nova.network.neutron [req-9dbcb9c1-c2c2-4563-a533-6daab9210ae8 req-545cb72d-da81-48db-b8cc-502cba8ebf4b service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updated VIF entry in instance network info cache for port 8c7f6ca3-5c56-4270-88a0-28ddd06a0256. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1606.951693] env[63297]: DEBUG nova.network.neutron [req-9dbcb9c1-c2c2-4563-a533-6daab9210ae8 req-545cb72d-da81-48db-b8cc-502cba8ebf4b service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updating instance_info_cache with network_info: [{"id": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "address": "fa:16:3e:b9:69:22", "network": {"id": "b20022f3-067f-42e6-9029-1f69f6657c27", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-894113652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e843a2460c21477ca894b3e2846ec98d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7f6ca3-5c", "ovs_interfaceid": "8c7f6ca3-5c56-4270-88a0-28ddd06a0256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.040722] env[63297]: DEBUG oslo_vmware.api [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Task: {'id': task-1697957, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.548276} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.040968] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1607.041161] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1607.041334] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1607.041503] env[63297]: INFO nova.compute.manager [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Took 2.13 seconds to destroy the instance on the hypervisor. 
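The PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: a vSphere task is started through the API session and then polled until vCenter reports completion (the repeated "Waiting for the task ... to complete" / "progress is N%" lines). A minimal hedged sketch of that pattern follows; it is not taken from this log or from Nova's source, the host and credentials are placeholders, and the UUID-lookup keyword names are assumptions based on the vSphere SearchIndex API.

    # Hedged sketch only -- illustrates the invoke-then-wait pattern behind the
    # "progress is N%" polling entries. Host, credentials and UUID are placeholders.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'username', 'secret',
        api_retry_count=3, task_poll_interval=0.5)

    # Look the VM up by instance UUID, as the SearchIndex.FindAllByUuid calls in
    # the log do (keyword names assumed from the vSphere SearchIndex API).
    refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='44f4776e-d4a1-40ad-a03b-bb03582b95bd',
        vmSearch=True, instanceUuid=True)
    vm_ref = refs[0]

    # Start the power-off task and block until vCenter reports success;
    # wait_for_task raises on failure, so errors surface as exceptions.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)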
[ 1607.041735] env[63297]: DEBUG oslo.service.loopingcall [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1607.041939] env[63297]: DEBUG nova.compute.manager [-] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1607.042055] env[63297]: DEBUG nova.network.neutron [-] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1607.091675] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1607.120912] env[63297]: ERROR nova.scheduler.client.report [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [req-531f77af-9b28-4258-ab6b-5d1bc19853ab] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-531f77af-9b28-4258-ab6b-5d1bc19853ab"}]} [ 1607.133445] env[63297]: DEBUG nova.compute.manager [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1607.138148] env[63297]: DEBUG nova.scheduler.client.report [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1607.154945] env[63297]: DEBUG nova.scheduler.client.report [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1607.155200] env[63297]: DEBUG nova.compute.provider_tree [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1607.164057] env[63297]: DEBUG nova.virt.hardware [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1607.164367] env[63297]: DEBUG nova.virt.hardware [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1607.164546] env[63297]: DEBUG nova.virt.hardware [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 
tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1607.164754] env[63297]: DEBUG nova.virt.hardware [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1607.164955] env[63297]: DEBUG nova.virt.hardware [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1607.165151] env[63297]: DEBUG nova.virt.hardware [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1607.165442] env[63297]: DEBUG nova.virt.hardware [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1607.165659] env[63297]: DEBUG nova.virt.hardware [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1607.165751] env[63297]: DEBUG nova.virt.hardware [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1607.165888] env[63297]: DEBUG nova.virt.hardware [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1607.166073] env[63297]: DEBUG nova.virt.hardware [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1607.166913] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450ac051-70c4-488a-b64f-87b5961dcee7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.170755] env[63297]: DEBUG nova.scheduler.client.report [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None 
{{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1607.179656] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc6bd32-e491-474a-96cb-88b45f6bcf46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.196793] env[63297]: INFO nova.compute.manager [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] instance snapshotting [ 1607.198153] env[63297]: DEBUG nova.objects.instance [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'flavor' on Instance uuid b1ed5d76-d358-49d3-a854-8f968bc987ad {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1607.201036] env[63297]: DEBUG nova.scheduler.client.report [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1607.286136] env[63297]: DEBUG nova.network.neutron [-] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.309305] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0e71012-18fc-4874-8014-256f91dac23c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.321203] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e05ca4d-cd1f-44c9-b0e6-052068f3fd47 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.358516] env[63297]: DEBUG nova.compute.manager [req-8cd208bb-c9e6-4ce8-8481-82eb3699c5c2 req-bcb3128b-6ee3-4297-be1f-daa03addbd1c service nova] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Detach interface failed, port_id=5da611f1-76bd-4f99-a624-d504e942a954, reason: Instance 44f4776e-d4a1-40ad-a03b-bb03582b95bd could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1607.361660] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4efab66c-2ea3-45b3-a162-9f0143e73f7e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.862s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1607.416535] env[63297]: DEBUG oslo_vmware.api [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697958, 'name': PowerOnVM_Task, 'duration_secs': 0.660161} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.417271] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1607.417271] env[63297]: INFO nova.compute.manager [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Took 11.33 seconds to spawn the instance on the hypervisor. [ 1607.417490] env[63297]: DEBUG nova.compute.manager [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1607.418620] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a0d59d-4975-45b7-9883-9005b364c0c9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.438953] env[63297]: DEBUG oslo_vmware.api [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697959, 'name': PowerOffVM_Task, 'duration_secs': 0.22985} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.439203] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1607.439367] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1607.439627] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2221ae8-2041-4bb9-99b0-54d4a9941406 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.455408] env[63297]: DEBUG oslo_concurrency.lockutils [req-9dbcb9c1-c2c2-4563-a533-6daab9210ae8 req-545cb72d-da81-48db-b8cc-502cba8ebf4b service nova] Releasing lock "refresh_cache-6c1aa85a-ee37-461b-ad8a-7fbb525e836e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.528647] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1607.528855] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None 
req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1607.529042] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Deleting the datastore file [datastore1] 6c1aa85a-ee37-461b-ad8a-7fbb525e836e {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1607.529408] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b54c0c22-3d84-4ab6-9f23-2f1a2354397a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.539806] env[63297]: DEBUG oslo_vmware.api [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for the task: (returnval){ [ 1607.539806] env[63297]: value = "task-1697961" [ 1607.539806] env[63297]: _type = "Task" [ 1607.539806] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.549505] env[63297]: DEBUG oslo_vmware.api [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697961, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.551515] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e9c1c3-82bd-4720-b59b-3d6869989aef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.562111] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ff65e5-7cea-4f5c-95f0-d7e35044d5d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.597173] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271a83ce-26ce-49e4-9d01-032a87613ce9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.600781] env[63297]: DEBUG nova.compute.manager [req-57656161-216e-431c-ade7-b63341a6eb7f req-e60e1959-fa75-420f-9ace-aa0c4862414b service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Received event network-vif-deleted-bc6f8a91-4b3e-4532-be2b-27a1ebd119aa {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1607.600883] env[63297]: INFO nova.compute.manager [req-57656161-216e-431c-ade7-b63341a6eb7f req-e60e1959-fa75-420f-9ace-aa0c4862414b service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Neutron deleted interface bc6f8a91-4b3e-4532-be2b-27a1ebd119aa; detaching it from the instance and deleting it from the info cache [ 1607.601015] env[63297]: DEBUG nova.network.neutron [req-57656161-216e-431c-ade7-b63341a6eb7f req-e60e1959-fa75-420f-9ace-aa0c4862414b service nova] [instance: 
b261c90f-642d-42b7-8b79-d87eeaf0537a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.608307] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700c7ed3-b14e-4da8-aaa6-48eabfbdf4fd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.623288] env[63297]: DEBUG nova.compute.provider_tree [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1607.707035] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eaf6302-73ad-49e6-8e5b-e30ed1a425d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.729682] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf1d08d-3ed8-4494-bcff-4ca0f3a74ee1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.789497] env[63297]: INFO nova.compute.manager [-] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Took 1.47 seconds to deallocate network for instance. [ 1607.926331] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1607.926620] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1607.942255] env[63297]: INFO nova.compute.manager [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Took 25.70 seconds to build instance. 
[ 1607.984287] env[63297]: DEBUG nova.network.neutron [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Successfully updated port: e0aae0a1-c525-4233-b876-799c11006f75 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1608.050291] env[63297]: DEBUG oslo_vmware.api [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Task: {'id': task-1697961, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262352} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.050568] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1608.050761] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1608.050938] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1608.051452] env[63297]: INFO nova.compute.manager [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1608.051452] env[63297]: DEBUG oslo.service.loopingcall [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1608.051581] env[63297]: DEBUG nova.compute.manager [-] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1608.051628] env[63297]: DEBUG nova.network.neutron [-] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1608.065798] env[63297]: DEBUG nova.network.neutron [-] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.103438] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83074bd9-685f-4251-87d3-fcd9af24961c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.114227] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1458c2-ef7a-437a-ad85-63b0a3efc2dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.148096] env[63297]: DEBUG nova.compute.manager [req-57656161-216e-431c-ade7-b63341a6eb7f req-e60e1959-fa75-420f-9ace-aa0c4862414b service nova] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Detach interface failed, port_id=bc6f8a91-4b3e-4532-be2b-27a1ebd119aa, reason: Instance b261c90f-642d-42b7-8b79-d87eeaf0537a could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1608.159920] env[63297]: DEBUG nova.scheduler.client.report [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 127 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1608.159920] env[63297]: DEBUG nova.compute.provider_tree [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 127 to 128 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1608.159920] env[63297]: DEBUG nova.compute.provider_tree [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1608.240485] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1608.240847] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-375d9b6f-6798-4a06-a8e9-f7ff7f288287 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.249215] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1608.249215] env[63297]: value = "task-1697962" [ 1608.249215] env[63297]: _type = "Task" [ 1608.249215] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.257865] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697962, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.296995] env[63297]: DEBUG oslo_concurrency.lockutils [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.429792] env[63297]: DEBUG nova.compute.utils [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1608.444515] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d39aa498-9334-43de-beac-c9928e828c10 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "f9ad9854-2f5b-4edd-9636-8d36d0a89e89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.218s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.486282] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.486431] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.486579] env[63297]: DEBUG nova.network.neutron [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1608.568172] env[63297]: INFO nova.compute.manager [-] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Took 1.53 seconds to deallocate network for instance. [ 1608.664602] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.559s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.665153] env[63297]: DEBUG nova.compute.manager [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1608.667733] env[63297]: DEBUG oslo_concurrency.lockutils [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.071s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.667945] env[63297]: DEBUG nova.objects.instance [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lazy-loading 'resources' on Instance uuid a10df0e9-4278-48f1-b111-864ac793f630 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1608.760773] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697962, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.785761] env[63297]: DEBUG nova.network.neutron [-] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.933212] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.002396] env[63297]: DEBUG nova.compute.manager [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Received event network-vif-plugged-e0aae0a1-c525-4233-b876-799c11006f75 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1609.002751] env[63297]: DEBUG oslo_concurrency.lockutils [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] Acquiring lock "a6d86e78-ae24-4e70-9fb2-270177b40322-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.004093] env[63297]: DEBUG oslo_concurrency.lockutils [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] Lock "a6d86e78-ae24-4e70-9fb2-270177b40322-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.004093] env[63297]: DEBUG oslo_concurrency.lockutils [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] Lock "a6d86e78-ae24-4e70-9fb2-270177b40322-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.004093] env[63297]: DEBUG nova.compute.manager [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] No waiting events found dispatching network-vif-plugged-e0aae0a1-c525-4233-b876-799c11006f75 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1609.004093] env[63297]: WARNING nova.compute.manager [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Received unexpected event network-vif-plugged-e0aae0a1-c525-4233-b876-799c11006f75 for instance with vm_state building and task_state spawning. 
[ 1609.004093] env[63297]: DEBUG nova.compute.manager [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Received event network-changed-e0aae0a1-c525-4233-b876-799c11006f75 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1609.004093] env[63297]: DEBUG nova.compute.manager [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Refreshing instance network info cache due to event network-changed-e0aae0a1-c525-4233-b876-799c11006f75. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1609.004093] env[63297]: DEBUG oslo_concurrency.lockutils [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] Acquiring lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.024907] env[63297]: DEBUG nova.network.neutron [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1609.064848] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "refresh_cache-1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.065018] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquired lock "refresh_cache-1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.065167] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Forcefully refreshing network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1609.073935] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.172339] env[63297]: DEBUG nova.compute.utils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1609.179422] env[63297]: DEBUG nova.network.neutron [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance_info_cache with network_info: [{"id": "e0aae0a1-c525-4233-b876-799c11006f75", "address": "fa:16:3e:f7:ce:b0", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": 
"tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0aae0a1-c5", "ovs_interfaceid": "e0aae0a1-c525-4233-b876-799c11006f75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.180840] env[63297]: DEBUG nova.compute.manager [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1609.180840] env[63297]: DEBUG nova.network.neutron [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1609.219688] env[63297]: DEBUG nova.policy [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1012cd7cb62c4ef593edecd3a38f4acb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01fe9157b11244cb86a7626caae0616d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1609.265630] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697962, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.288585] env[63297]: INFO nova.compute.manager [-] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Took 1.24 seconds to deallocate network for instance. 
[ 1609.496457] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a310c1-077f-4a25-90cc-4c372e784c76 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.505391] env[63297]: DEBUG nova.network.neutron [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Successfully created port: bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1609.511114] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d03c09d-815a-4acf-a17b-98a96c673d3b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.544870] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3d2852-1636-4173-bdfc-0eac3a5fbec7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.552884] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f87bf45-8076-408d-aad1-93951c87c7a3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.567479] env[63297]: DEBUG nova.compute.provider_tree [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1609.678511] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "f9ad9854-2f5b-4edd-9636-8d36d0a89e89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.678885] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "f9ad9854-2f5b-4edd-9636-8d36d0a89e89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.679177] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "f9ad9854-2f5b-4edd-9636-8d36d0a89e89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.679402] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "f9ad9854-2f5b-4edd-9636-8d36d0a89e89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.679590] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "f9ad9854-2f5b-4edd-9636-8d36d0a89e89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.681682] env[63297]: DEBUG nova.compute.manager [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1609.685006] env[63297]: INFO nova.compute.manager [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Terminating instance [ 1609.686534] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.686836] env[63297]: DEBUG nova.compute.manager [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Instance network_info: |[{"id": "e0aae0a1-c525-4233-b876-799c11006f75", "address": "fa:16:3e:f7:ce:b0", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0aae0a1-c5", "ovs_interfaceid": "e0aae0a1-c525-4233-b876-799c11006f75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1609.687847] env[63297]: DEBUG nova.compute.manager [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1609.687925] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1609.688406] env[63297]: DEBUG oslo_concurrency.lockutils [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] Acquired lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.688621] env[63297]: DEBUG nova.network.neutron [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Refreshing network info cache for port e0aae0a1-c525-4233-b876-799c11006f75 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1609.689714] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:ce:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e0aae0a1-c525-4233-b876-799c11006f75', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1609.698168] env[63297]: DEBUG oslo.service.loopingcall [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1609.699159] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-229a8497-a99d-412a-bec8-15ce50d09a59 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.704718] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1609.707126] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d838975b-056c-4243-ae74-cc22cd964024 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.726570] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1609.726845] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90d14ee7-1d7e-4e04-8d4c-294da04bd926 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.729897] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1609.729897] env[63297]: value = "task-1697963" [ 1609.729897] env[63297]: _type = "Task" [ 1609.729897] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.734774] env[63297]: DEBUG oslo_vmware.api [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1609.734774] env[63297]: value = "task-1697964" [ 1609.734774] env[63297]: _type = "Task" [ 1609.734774] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.738131] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697963, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.746529] env[63297]: DEBUG oslo_vmware.api [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697964, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.759811] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697962, 'name': CreateSnapshot_Task, 'duration_secs': 1.098391} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.760138] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1609.761022] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ec5263-ebd3-48fd-81da-b83719d934f9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.798993] env[63297]: DEBUG oslo_concurrency.lockutils [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.996413] env[63297]: DEBUG nova.network.neutron [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updated VIF entry in instance network info cache for port e0aae0a1-c525-4233-b876-799c11006f75. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1609.996819] env[63297]: DEBUG nova.network.neutron [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance_info_cache with network_info: [{"id": "e0aae0a1-c525-4233-b876-799c11006f75", "address": "fa:16:3e:f7:ce:b0", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0aae0a1-c5", "ovs_interfaceid": "e0aae0a1-c525-4233-b876-799c11006f75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.006606] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1610.007026] env[63297]: DEBUG 
oslo_concurrency.lockutils [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.007827] env[63297]: INFO nova.compute.manager [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Attaching volume 2af25825-cee7-42ea-abc3-4b9187975f98 to /dev/sdc [ 1610.044794] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15bd7d9-76d2-45d4-92dc-36c0e750a953 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.052329] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7031e308-8bf6-4859-aee7-49673f5e96e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.066218] env[63297]: DEBUG nova.virt.block_device [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Updating existing volume attachment record: fa3ca8ee-c30f-4798-aafe-6e25d925f172 {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1610.072724] env[63297]: DEBUG nova.scheduler.client.report [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1610.239291] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697963, 'name': CreateVM_Task, 'duration_secs': 0.356773} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.244355] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1610.245249] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1610.245415] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.245726] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1610.246620] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07d3482e-3fc7-44c4-b852-399d73d62531 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.251351] env[63297]: DEBUG oslo_vmware.api [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697964, 'name': PowerOffVM_Task, 'duration_secs': 0.163237} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.251592] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1610.251758] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1610.251982] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35eb28c7-af37-4d17-8d5b-1ea5018d0e0b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.254361] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1610.254361] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e9bd9f-3ff8-4cf2-b2d1-6d504335f2ac" [ 1610.254361] env[63297]: _type = "Task" [ 1610.254361] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.262244] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e9bd9f-3ff8-4cf2-b2d1-6d504335f2ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.280579] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1610.280900] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8b763537-f95b-4d35-a72c-1bfd88e241b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.289403] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1610.289403] env[63297]: value = "task-1697967" [ 1610.289403] env[63297]: _type = "Task" [ 1610.289403] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.298043] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697967, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.362697] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Updating instance_info_cache with network_info: [{"id": "468613e2-02e8-4bf5-9887-fc0f90ff2f75", "address": "fa:16:3e:bb:c6:4d", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap468613e2-02", "ovs_interfaceid": "468613e2-02e8-4bf5-9887-fc0f90ff2f75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.499431] env[63297]: DEBUG oslo_concurrency.lockutils [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] Releasing lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.499768] env[63297]: DEBUG nova.compute.manager [req-59d9d33e-a491-4968-b6d6-67d192292e12 req-d865366d-aec5-4adf-9b8c-575491e23cae service nova] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Received event network-vif-deleted-8c7f6ca3-5c56-4270-88a0-28ddd06a0256 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1610.576905] env[63297]: DEBUG oslo_concurrency.lockutils [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.909s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.579479] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.234s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.581012] env[63297]: INFO nova.compute.claims [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1610.604811] env[63297]: 
INFO nova.scheduler.client.report [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted allocations for instance a10df0e9-4278-48f1-b111-864ac793f630 [ 1610.694188] env[63297]: DEBUG nova.compute.manager [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1610.719761] env[63297]: DEBUG nova.virt.hardware [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1610.720025] env[63297]: DEBUG nova.virt.hardware [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1610.720192] env[63297]: DEBUG nova.virt.hardware [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1610.720374] env[63297]: DEBUG nova.virt.hardware [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1610.720524] env[63297]: DEBUG nova.virt.hardware [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1610.720671] env[63297]: DEBUG nova.virt.hardware [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1610.720910] env[63297]: DEBUG nova.virt.hardware [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 
tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1610.721086] env[63297]: DEBUG nova.virt.hardware [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1610.721260] env[63297]: DEBUG nova.virt.hardware [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1610.721420] env[63297]: DEBUG nova.virt.hardware [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1610.721594] env[63297]: DEBUG nova.virt.hardware [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1610.722496] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e949fc-e65e-4c31-8c0a-058c987ee166 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.730738] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db92b12-700a-40b4-94b3-2e9b4dc14c45 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.763982] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e9bd9f-3ff8-4cf2-b2d1-6d504335f2ac, 'name': SearchDatastore_Task, 'duration_secs': 0.015114} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.764298] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.764527] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1610.764760] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1610.764909] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.765104] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1610.765355] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b88fa8f-39f0-434e-9eee-4748b9e1d4a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.773652] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1610.773825] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1610.774527] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b582db2f-6d8a-4402-96e0-5ef6bc6a5341 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.780064] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1610.780064] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e8a9c1-d473-e44b-2418-0ac19f4cf234" [ 1610.780064] env[63297]: _type = "Task" [ 1610.780064] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.787945] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e8a9c1-d473-e44b-2418-0ac19f4cf234, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.798696] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697967, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.866671] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Releasing lock "refresh_cache-1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.866845] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Updated the network info_cache for instance {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1610.867099] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1610.867251] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1610.867396] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1610.867561] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1610.867794] env[63297]: DEBUG oslo_service.periodic_task 
[None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1610.867939] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1610.868078] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1610.868226] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.009519] env[63297]: DEBUG nova.network.neutron [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Successfully updated port: bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1611.035206] env[63297]: DEBUG nova.compute.manager [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Received event network-vif-plugged-bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1611.035206] env[63297]: DEBUG oslo_concurrency.lockutils [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] Acquiring lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.035206] env[63297]: DEBUG oslo_concurrency.lockutils [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.035206] env[63297]: DEBUG oslo_concurrency.lockutils [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.035206] env[63297]: DEBUG nova.compute.manager [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] No waiting events found dispatching network-vif-plugged-bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1611.035206] env[63297]: WARNING nova.compute.manager [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f 
req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Received unexpected event network-vif-plugged-bb862c99-f006-416a-9b98-0fb287a5d194 for instance with vm_state building and task_state spawning. [ 1611.035206] env[63297]: DEBUG nova.compute.manager [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Received event network-changed-bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1611.035206] env[63297]: DEBUG nova.compute.manager [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Refreshing instance network info cache due to event network-changed-bb862c99-f006-416a-9b98-0fb287a5d194. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1611.035206] env[63297]: DEBUG oslo_concurrency.lockutils [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] Acquiring lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.035677] env[63297]: DEBUG oslo_concurrency.lockutils [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] Acquired lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.035677] env[63297]: DEBUG nova.network.neutron [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Refreshing network info cache for port bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1611.113538] env[63297]: DEBUG oslo_concurrency.lockutils [None req-902e7082-879d-4e7c-85c9-e03fe49c5477 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a10df0e9-4278-48f1-b111-864ac793f630" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.800s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.261176] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1611.261399] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1611.261580] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleting the datastore file [datastore1] f9ad9854-2f5b-4edd-9636-8d36d0a89e89 {{(pid=63297) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1611.261840] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1583ebf-37df-4126-9716-49338e3796de {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.268607] env[63297]: DEBUG oslo_vmware.api [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1611.268607] env[63297]: value = "task-1697968" [ 1611.268607] env[63297]: _type = "Task" [ 1611.268607] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.276509] env[63297]: DEBUG oslo_vmware.api [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697968, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.288852] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e8a9c1-d473-e44b-2418-0ac19f4cf234, 'name': SearchDatastore_Task, 'duration_secs': 0.008853} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.289905] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c45a506-61d3-4377-9bc1-ec21613ba4fb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.299762] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1611.299762] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cb124e-407c-e157-e441-a7089d89f496" [ 1611.299762] env[63297]: _type = "Task" [ 1611.299762] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.302958] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697967, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.311426] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cb124e-407c-e157-e441-a7089d89f496, 'name': SearchDatastore_Task, 'duration_secs': 0.009777} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.311668] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.311928] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] a6d86e78-ae24-4e70-9fb2-270177b40322/a6d86e78-ae24-4e70-9fb2-270177b40322.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1611.312188] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3afa4175-ffe4-49fa-9506-52eec052dd8f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.318446] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1611.318446] env[63297]: value = "task-1697969" [ 1611.318446] env[63297]: _type = "Task" [ 1611.318446] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.326306] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697969, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.371692] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.513832] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.568890] env[63297]: DEBUG nova.network.neutron [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1611.664292] env[63297]: DEBUG nova.network.neutron [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1611.781328] env[63297]: DEBUG oslo_vmware.api [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1697968, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137853} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.781670] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1611.781774] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1611.781953] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1611.782191] env[63297]: INFO nova.compute.manager [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Took 2.09 seconds to destroy the instance on the hypervisor. [ 1611.782445] env[63297]: DEBUG oslo.service.loopingcall [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1611.782645] env[63297]: DEBUG nova.compute.manager [-] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1611.782815] env[63297]: DEBUG nova.network.neutron [-] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1611.801636] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697967, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.831249] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697969, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489758} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.831524] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] a6d86e78-ae24-4e70-9fb2-270177b40322/a6d86e78-ae24-4e70-9fb2-270177b40322.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1611.831735] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1611.831991] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-926697c8-d949-462b-bd23-6303eaaafed3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.838873] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1611.838873] env[63297]: value = "task-1697970" [ 1611.838873] env[63297]: _type = "Task" [ 1611.838873] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.846695] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697970, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.905867] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68095486-8ea7-45f1-9db2-1c54da978142 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.913485] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fe2f52-e61f-4d95-94c7-57ef9f61c3ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.945861] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58331bcf-8406-4963-86e1-f3788f6dae04 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.953458] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8aaed2-8029-4895-87b5-569f7173cb13 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.967093] env[63297]: DEBUG nova.compute.provider_tree [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1612.167027] env[63297]: DEBUG oslo_concurrency.lockutils [req-e2b62493-8f89-4b2d-8e69-e1bdcc22ee6f req-e9f5d10d-8aab-4ba4-a5ee-f83043c2521f service nova] Releasing lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.167321] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.167404] env[63297]: DEBUG nova.network.neutron [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1612.303399] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697967, 'name': CloneVM_Task, 'duration_secs': 1.801917} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.303682] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Created linked-clone VM from snapshot [ 1612.304749] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f62fb6-d95c-4373-9d11-d1ee16e988bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.313535] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Uploading image a8c08d8e-6819-4c48-bb60-faff753cbe12 {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1612.334197] env[63297]: DEBUG oslo_vmware.rw_handles [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1612.334197] env[63297]: value = "vm-353977" [ 1612.334197] env[63297]: _type = "VirtualMachine" [ 1612.334197] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1612.334458] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-decae32b-141a-41f1-b4d0-1cceeae14055 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.343302] env[63297]: DEBUG oslo_vmware.rw_handles [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lease: (returnval){ [ 1612.343302] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ac4581-ce60-0125-4b17-bcec4abdd40e" [ 1612.343302] env[63297]: _type = "HttpNfcLease" [ 1612.343302] env[63297]: } obtained for exporting VM: (result){ [ 1612.343302] env[63297]: value = "vm-353977" [ 1612.343302] env[63297]: _type = "VirtualMachine" [ 1612.343302] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1612.343567] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the lease: (returnval){ [ 1612.343567] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ac4581-ce60-0125-4b17-bcec4abdd40e" [ 1612.343567] env[63297]: _type = "HttpNfcLease" [ 1612.343567] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1612.349483] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697970, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070262} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.350106] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1612.350823] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a692d4-87ad-40ef-b26c-57f4577a6263 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.354150] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1612.354150] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ac4581-ce60-0125-4b17-bcec4abdd40e" [ 1612.354150] env[63297]: _type = "HttpNfcLease" [ 1612.354150] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1612.354677] env[63297]: DEBUG oslo_vmware.rw_handles [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1612.354677] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ac4581-ce60-0125-4b17-bcec4abdd40e" [ 1612.354677] env[63297]: _type = "HttpNfcLease" [ 1612.354677] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1612.355353] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd5b84e-8d7e-45f0-89e3-012ed6dfefe6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.374536] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] a6d86e78-ae24-4e70-9fb2-270177b40322/a6d86e78-ae24-4e70-9fb2-270177b40322.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1612.375067] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24eea7f1-47bc-4b05-973d-6c7614ce2fb7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.391823] env[63297]: DEBUG oslo_vmware.rw_handles [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dd5a28-2d6f-8fb3-43d7-81ef2d6e81ab/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1612.391990] env[63297]: DEBUG oslo_vmware.rw_handles [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dd5a28-2d6f-8fb3-43d7-81ef2d6e81ab/disk-0.vmdk for reading. 
{{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1612.449465] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1612.449465] env[63297]: value = "task-1697973" [ 1612.449465] env[63297]: _type = "Task" [ 1612.449465] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.458636] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697973, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.470626] env[63297]: DEBUG nova.scheduler.client.report [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1612.478580] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6866ab22-bbd9-4d8d-8997-626c9e180c4b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.515427] env[63297]: DEBUG nova.network.neutron [-] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.700344] env[63297]: DEBUG nova.network.neutron [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1612.861647] env[63297]: DEBUG nova.network.neutron [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updating instance_info_cache with network_info: [{"id": "bb862c99-f006-416a-9b98-0fb287a5d194", "address": "fa:16:3e:03:4f:8d", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb862c99-f0", "ovs_interfaceid": "bb862c99-f006-416a-9b98-0fb287a5d194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.959844] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697973, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.976371] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.397s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.976847] env[63297]: DEBUG nova.compute.manager [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1612.979865] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.042s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.980214] env[63297]: DEBUG nova.objects.instance [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lazy-loading 'resources' on Instance uuid cc644ecc-7340-421c-b966-19145eb82949 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1613.018200] env[63297]: INFO nova.compute.manager [-] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Took 1.24 seconds to deallocate network for instance. [ 1613.063228] env[63297]: DEBUG nova.compute.manager [req-bcc5c286-38d6-41c6-bb98-348921e4e486 req-6dd8a5a2-bca0-4db1-9100-6fb2f8e7d192 service nova] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Received event network-vif-deleted-6b9e7381-f512-4fe3-9eb0-f334dbb61211 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1613.369024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.369024] env[63297]: DEBUG nova.compute.manager [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Instance network_info: |[{"id": "bb862c99-f006-416a-9b98-0fb287a5d194", "address": "fa:16:3e:03:4f:8d", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb862c99-f0", "ovs_interfaceid": "bb862c99-f006-416a-9b98-0fb287a5d194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1613.369024] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 
427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:4f:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb862c99-f006-416a-9b98-0fb287a5d194', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1613.379834] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating folder: Project (01fe9157b11244cb86a7626caae0616d). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1613.380191] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e70e28c-8a98-4dfa-8923-3c52bbe7977e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.391712] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Created folder: Project (01fe9157b11244cb86a7626caae0616d) in parent group-v353718. [ 1613.392130] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating folder: Instances. Parent ref: group-v353978. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1613.392414] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03cbdfe8-b6bc-45ee-b5d7-7f3e934ffc80 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.402837] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Created folder: Instances in parent group-v353978. [ 1613.403692] env[63297]: DEBUG oslo.service.loopingcall [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1613.403692] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1613.403692] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-736e9d35-da72-4b28-b9a6-c0ad9709dd52 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.425947] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1613.425947] env[63297]: value = "task-1697976" [ 1613.425947] env[63297]: _type = "Task" [ 1613.425947] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.434400] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697976, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.464309] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697973, 'name': ReconfigVM_Task, 'duration_secs': 0.727926} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.464415] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Reconfigured VM instance instance-0000005d to attach disk [datastore1] a6d86e78-ae24-4e70-9fb2-270177b40322/a6d86e78-ae24-4e70-9fb2-270177b40322.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1613.465219] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea8d42ae-9783-4264-94d8-4d6522b2417b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.472341] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1613.472341] env[63297]: value = "task-1697977" [ 1613.472341] env[63297]: _type = "Task" [ 1613.472341] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.491754] env[63297]: DEBUG nova.compute.utils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1613.503366] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697977, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.504431] env[63297]: DEBUG nova.compute.manager [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1613.504783] env[63297]: DEBUG nova.network.neutron [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1613.526254] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.571388] env[63297]: DEBUG nova.policy [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0da2fdb3c81747698f971951c5e0068b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efc8039a70b34a269d3aed1ecb558b7e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1613.917364] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c86469d-7603-4c47-9b75-faf3fd3496ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.921562] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7132d1-90ca-43d8-ad9e-a66d310d9067 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.966215] env[63297]: DEBUG nova.network.neutron [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Successfully created port: b317a4c8-cd02-4515-9830-7536b71d4451 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1613.970954] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ce7cd1-1da3-48aa-a7a8-6ab4c9335ca9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.977422] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697976, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.986128] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697977, 'name': Rename_Task, 'duration_secs': 0.206451} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.988483] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1613.988848] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-614d29b3-68df-4ff0-93f3-6df5035de87d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.991662] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e49a612-2f36-49af-a23e-9542e4298de7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.999128] env[63297]: DEBUG nova.compute.manager [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1614.010473] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1614.010473] env[63297]: value = "task-1697978" [ 1614.010473] env[63297]: _type = "Task" [ 1614.010473] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.011051] env[63297]: DEBUG nova.compute.provider_tree [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1614.021499] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697978, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.437095] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697976, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.517716] env[63297]: DEBUG nova.scheduler.client.report [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1614.530607] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697978, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.612802] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Volume attach. Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1614.613064] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353976', 'volume_id': '2af25825-cee7-42ea-abc3-4b9187975f98', 'name': 'volume-2af25825-cee7-42ea-abc3-4b9187975f98', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b7b9f1b-d277-4219-92fb-e35a8b867e77', 'attached_at': '', 'detached_at': '', 'volume_id': '2af25825-cee7-42ea-abc3-4b9187975f98', 'serial': '2af25825-cee7-42ea-abc3-4b9187975f98'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1614.614332] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37a967c-df11-4bbf-bfd8-6a32066404a3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.633240] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5270d4a0-732a-4dff-ab41-f7137d61ab05 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.465557] env[63297]: DEBUG nova.compute.manager [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1615.468325] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.488s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.479593] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] volume-2af25825-cee7-42ea-abc3-4b9187975f98/volume-2af25825-cee7-42ea-abc3-4b9187975f98.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1615.487096] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.730s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.490467] env[63297]: INFO nova.compute.claims [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1615.492846] env[63297]: DEBUG nova.compute.manager [req-43842bd9-3fdf-4a93-b0c0-fa2b03a77918 req-7e2bebf5-a6c9-4823-969a-5a1ce2ffdae1 service nova] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Received event network-vif-plugged-b317a4c8-cd02-4515-9830-7536b71d4451 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1615.492846] env[63297]: DEBUG oslo_concurrency.lockutils [req-43842bd9-3fdf-4a93-b0c0-fa2b03a77918 req-7e2bebf5-a6c9-4823-969a-5a1ce2ffdae1 service nova] Acquiring lock "fd178a30-b5f0-4019-a05f-f1928e1d122a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.493351] env[63297]: DEBUG oslo_concurrency.lockutils [req-43842bd9-3fdf-4a93-b0c0-fa2b03a77918 req-7e2bebf5-a6c9-4823-969a-5a1ce2ffdae1 service nova] Lock "fd178a30-b5f0-4019-a05f-f1928e1d122a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.493351] env[63297]: DEBUG oslo_concurrency.lockutils [req-43842bd9-3fdf-4a93-b0c0-fa2b03a77918 req-7e2bebf5-a6c9-4823-969a-5a1ce2ffdae1 service nova] Lock "fd178a30-b5f0-4019-a05f-f1928e1d122a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.493351] env[63297]: DEBUG nova.compute.manager [req-43842bd9-3fdf-4a93-b0c0-fa2b03a77918 req-7e2bebf5-a6c9-4823-969a-5a1ce2ffdae1 service nova] [instance: 
fd178a30-b5f0-4019-a05f-f1928e1d122a] No waiting events found dispatching network-vif-plugged-b317a4c8-cd02-4515-9830-7536b71d4451 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1615.493498] env[63297]: WARNING nova.compute.manager [req-43842bd9-3fdf-4a93-b0c0-fa2b03a77918 req-7e2bebf5-a6c9-4823-969a-5a1ce2ffdae1 service nova] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Received unexpected event network-vif-plugged-b317a4c8-cd02-4515-9830-7536b71d4451 for instance with vm_state building and task_state spawning. [ 1615.496352] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73e09e7a-8ca8-461a-af75-db644f736e59 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.510741] env[63297]: INFO nova.scheduler.client.report [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Deleted allocations for instance cc644ecc-7340-421c-b966-19145eb82949 [ 1615.524199] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697976, 'name': CreateVM_Task, 'duration_secs': 1.443823} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.524523] env[63297]: DEBUG oslo_vmware.api [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1697978, 'name': PowerOnVM_Task, 'duration_secs': 1.281552} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.526977] env[63297]: DEBUG nova.virt.hardware [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1615.527410] env[63297]: DEBUG nova.virt.hardware [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1615.527652] env[63297]: DEBUG nova.virt.hardware [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1615.527917] env[63297]: DEBUG nova.virt.hardware [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 
tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1615.528152] env[63297]: DEBUG nova.virt.hardware [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1615.528367] env[63297]: DEBUG nova.virt.hardware [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1615.528612] env[63297]: DEBUG nova.virt.hardware [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1615.528779] env[63297]: DEBUG nova.virt.hardware [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1615.529050] env[63297]: DEBUG nova.virt.hardware [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1615.529303] env[63297]: DEBUG nova.virt.hardware [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1615.529615] env[63297]: DEBUG nova.virt.hardware [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1615.530979] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1615.531353] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1615.531679] env[63297]: INFO nova.compute.manager [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Took 8.40 seconds to spawn the instance on the hypervisor. 
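(Aside on the nova.virt.hardware entries directly above: for the m1.nano flavor the driver enumerates every (sockets, cores, threads) split of the flavor's vCPU count that fits the flavor/image caps, then sorts by preference, which is why 1 vCPU yields exactly one topology. The snippet below is a minimal standalone sketch of that enumeration only, not Nova's actual implementation; the 65536 default caps are taken from the log lines above and the function name is illustrative.)

```python
def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) split whose product equals vcpus
    and which respects the per-dimension caps (illustrative sketch only)."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

# For the 1-vCPU flavor logged above this returns [(1, 1, 1)],
# matching "Got 1 possible topologies".
print(possible_cpu_topologies(1))
```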
[ 1615.531877] env[63297]: DEBUG nova.compute.manager [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1615.533634] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b3680d-873f-4356-83a2-3411763c82e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.535972] env[63297]: DEBUG oslo_vmware.api [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1615.535972] env[63297]: value = "task-1697979" [ 1615.535972] env[63297]: _type = "Task" [ 1615.535972] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.536643] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1615.536802] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1615.537132] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1615.537901] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fb567b-1e90-43d1-8340-331d6ec2c6e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.541867] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fde302b-c4c5-4d98-b1eb-4ca516e185b3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.551860] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f065ac04-74f6-4194-9121-26fa54c6299f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.566559] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1615.566559] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5259ddc8-f876-9e60-f1a8-6ff69e87ca9b" [ 1615.566559] env[63297]: _type = 
"Task" [ 1615.566559] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.566559] env[63297]: DEBUG oslo_vmware.api [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697979, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.584034] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5259ddc8-f876-9e60-f1a8-6ff69e87ca9b, 'name': SearchDatastore_Task, 'duration_secs': 0.013314} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.584355] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1615.584638] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1615.584879] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1615.585045] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1615.585266] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1615.585539] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0d6ff20-8250-4c3e-844b-bae6dfc41c58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.594981] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] 
Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1615.594981] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1615.595497] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67adda61-00ff-4dd9-aefd-7649c177fe73 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.601769] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1615.601769] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ae274c-6d41-10a7-1e39-1e47b1f33989" [ 1615.601769] env[63297]: _type = "Task" [ 1615.601769] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.610965] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ae274c-6d41-10a7-1e39-1e47b1f33989, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.625841] env[63297]: DEBUG nova.network.neutron [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Successfully updated port: b317a4c8-cd02-4515-9830-7536b71d4451 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1616.018589] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cd235488-4dfc-4fc5-96dd-0f7d6f66d739 tempest-ServerGroupTestJSON-677506932 tempest-ServerGroupTestJSON-677506932-project-member] Lock "cc644ecc-7340-421c-b966-19145eb82949" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.047s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.052734] env[63297]: DEBUG oslo_vmware.api [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697979, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.072833] env[63297]: INFO nova.compute.manager [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Took 20.92 seconds to build instance. 
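(Aside on the Acquiring/Acquired/Releasing lock entries around "[datastore1] devstack-image-cache_base" above: those DEBUG lines are emitted by oslo.concurrency's lockutils while the image-cache path is held. A hedged sketch of the same pattern follows; the helper name and lock-name layout are illustrative assumptions modeled on the log, not Nova's code.)

```python
from oslo_concurrency import lockutils

def with_image_cache_lock(datastore, image_id, fn):
    # Hypothetical helper: the lock name mirrors the
    # "[datastore1] devstack-image-cache_base/<image-id>" names in the log.
    lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
    # lockutils.lock() is a context manager; it logs Acquiring/Acquired/
    # Releasing DEBUG lines similar to those above while the lock is held.
    with lockutils.lock(lock_name):
        return fn()
```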
[ 1616.112486] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ae274c-6d41-10a7-1e39-1e47b1f33989, 'name': SearchDatastore_Task, 'duration_secs': 0.012154} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.113482] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-266753b7-f126-4645-ba35-c5e146343dd7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.119566] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1616.119566] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527e567b-db4b-a2f7-8aae-2dd218bbda48" [ 1616.119566] env[63297]: _type = "Task" [ 1616.119566] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.127939] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "refresh_cache-fd178a30-b5f0-4019-a05f-f1928e1d122a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.128154] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "refresh_cache-fd178a30-b5f0-4019-a05f-f1928e1d122a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.128364] env[63297]: DEBUG nova.network.neutron [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1616.129665] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527e567b-db4b-a2f7-8aae-2dd218bbda48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.551697] env[63297]: DEBUG oslo_vmware.api [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697979, 'name': ReconfigVM_Task, 'duration_secs': 0.637905} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.552012] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Reconfigured VM instance instance-00000052 to attach disk [datastore1] volume-2af25825-cee7-42ea-abc3-4b9187975f98/volume-2af25825-cee7-42ea-abc3-4b9187975f98.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1616.559668] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-730f1c74-b2c1-450b-96e2-06c0ff972e5c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.575544] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9e784392-fd10-4d9d-b758-a3d3b89e9fd1 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a6d86e78-ae24-4e70-9fb2-270177b40322" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.431s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.575881] env[63297]: DEBUG oslo_vmware.api [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1616.575881] env[63297]: value = "task-1697980" [ 1616.575881] env[63297]: _type = "Task" [ 1616.575881] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.589712] env[63297]: DEBUG oslo_vmware.api [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697980, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.633273] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527e567b-db4b-a2f7-8aae-2dd218bbda48, 'name': SearchDatastore_Task, 'duration_secs': 0.012528} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.635700] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.635990] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1/427c4ff0-1bf1-4bfb-b5c6-de6659148ab1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1616.637190] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7242e59a-9ab1-46ae-821a-09c91f202e8f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.644573] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1616.644573] env[63297]: value = "task-1697981" [ 1616.644573] env[63297]: _type = "Task" [ 1616.644573] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.656304] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697981, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.670139] env[63297]: DEBUG nova.network.neutron [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1616.849342] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544407ee-e270-4153-83da-e5475de73b56 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.857405] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff7d699-1553-4a39-bfdb-c04a153d23c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.890864] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41f05bc-edb2-418c-b909-1afa7570ff1c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.899034] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72819b0f-333f-40a7-ba3c-71074d1a84fb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.916485] env[63297]: DEBUG nova.compute.provider_tree [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1616.976562] env[63297]: DEBUG nova.network.neutron [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Updating instance_info_cache with network_info: [{"id": "b317a4c8-cd02-4515-9830-7536b71d4451", "address": "fa:16:3e:17:96:4c", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb317a4c8-cd", "ovs_interfaceid": "b317a4c8-cd02-4515-9830-7536b71d4451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.085954] env[63297]: DEBUG oslo_vmware.api [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697980, 'name': ReconfigVM_Task, 'duration_secs': 0.193486} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.086346] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353976', 'volume_id': '2af25825-cee7-42ea-abc3-4b9187975f98', 'name': 'volume-2af25825-cee7-42ea-abc3-4b9187975f98', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b7b9f1b-d277-4219-92fb-e35a8b867e77', 'attached_at': '', 'detached_at': '', 'volume_id': '2af25825-cee7-42ea-abc3-4b9187975f98', 'serial': '2af25825-cee7-42ea-abc3-4b9187975f98'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1617.157243] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697981, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.422737] env[63297]: DEBUG nova.scheduler.client.report [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1617.464332] env[63297]: DEBUG nova.compute.manager [req-c5e60a4d-5ebd-4bc8-8318-3de04b465d91 req-bb1d3275-3e49-4f7e-8593-93624b388c68 service nova] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Received event network-changed-b317a4c8-cd02-4515-9830-7536b71d4451 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1617.464503] env[63297]: DEBUG nova.compute.manager [req-c5e60a4d-5ebd-4bc8-8318-3de04b465d91 req-bb1d3275-3e49-4f7e-8593-93624b388c68 service nova] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Refreshing instance network info cache due to event network-changed-b317a4c8-cd02-4515-9830-7536b71d4451. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1617.464627] env[63297]: DEBUG oslo_concurrency.lockutils [req-c5e60a4d-5ebd-4bc8-8318-3de04b465d91 req-bb1d3275-3e49-4f7e-8593-93624b388c68 service nova] Acquiring lock "refresh_cache-fd178a30-b5f0-4019-a05f-f1928e1d122a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.482889] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "refresh_cache-fd178a30-b5f0-4019-a05f-f1928e1d122a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.483302] env[63297]: DEBUG nova.compute.manager [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Instance network_info: |[{"id": "b317a4c8-cd02-4515-9830-7536b71d4451", "address": "fa:16:3e:17:96:4c", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb317a4c8-cd", "ovs_interfaceid": "b317a4c8-cd02-4515-9830-7536b71d4451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1617.483630] env[63297]: DEBUG oslo_concurrency.lockutils [req-c5e60a4d-5ebd-4bc8-8318-3de04b465d91 req-bb1d3275-3e49-4f7e-8593-93624b388c68 service nova] Acquired lock "refresh_cache-fd178a30-b5f0-4019-a05f-f1928e1d122a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.483860] env[63297]: DEBUG nova.network.neutron [req-c5e60a4d-5ebd-4bc8-8318-3de04b465d91 req-bb1d3275-3e49-4f7e-8593-93624b388c68 service nova] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Refreshing network info cache for port b317a4c8-cd02-4515-9830-7536b71d4451 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1617.485280] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:96:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '22390021-1742-415d-b442-811550d09927', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'b317a4c8-cd02-4515-9830-7536b71d4451', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1617.494963] env[63297]: DEBUG oslo.service.loopingcall [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1617.502024] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1617.502024] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f64977d8-a39a-409c-8764-c58ebb3e8f32 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.523094] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1617.523094] env[63297]: value = "task-1697982" [ 1617.523094] env[63297]: _type = "Task" [ 1617.523094] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.532798] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697982, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.656509] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697981, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615902} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.656509] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1/427c4ff0-1bf1-4bfb-b5c6-de6659148ab1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1617.656878] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1617.656878] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f22c08a7-3922-45bd-a0c5-5a31a6fbda08 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.664039] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1617.664039] env[63297]: value = "task-1697983" [ 1617.664039] env[63297]: _type = "Task" [ 1617.664039] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.677165] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697983, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.928891] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.442s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.929533] env[63297]: DEBUG nova.compute.manager [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1617.932524] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.841s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.934416] env[63297]: INFO nova.compute.claims [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1617.983628] env[63297]: DEBUG nova.network.neutron [req-c5e60a4d-5ebd-4bc8-8318-3de04b465d91 req-bb1d3275-3e49-4f7e-8593-93624b388c68 service nova] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Updated VIF entry in instance network info cache for port b317a4c8-cd02-4515-9830-7536b71d4451. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1617.983926] env[63297]: DEBUG nova.network.neutron [req-c5e60a4d-5ebd-4bc8-8318-3de04b465d91 req-bb1d3275-3e49-4f7e-8593-93624b388c68 service nova] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Updating instance_info_cache with network_info: [{"id": "b317a4c8-cd02-4515-9830-7536b71d4451", "address": "fa:16:3e:17:96:4c", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb317a4c8-cd", "ovs_interfaceid": "b317a4c8-cd02-4515-9830-7536b71d4451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.034131] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697982, 'name': CreateVM_Task, 'duration_secs': 0.386196} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.034131] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1618.034850] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.035599] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.036183] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1618.037572] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-172384cc-3cb5-4797-91c0-addcfe526934 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.045041] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1618.045041] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ce8617-668b-1e9e-4c03-e9b22710d1cd" [ 1618.045041] env[63297]: _type = "Task" [ 1618.045041] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.053809] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ce8617-668b-1e9e-4c03-e9b22710d1cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.136773] env[63297]: DEBUG nova.objects.instance [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lazy-loading 'flavor' on Instance uuid 0b7b9f1b-d277-4219-92fb-e35a8b867e77 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1618.177669] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697983, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077208} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.177669] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1618.178187] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1749de6-67ef-4f92-af1b-6bb84fd098db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.204923] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1/427c4ff0-1bf1-4bfb-b5c6-de6659148ab1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1618.205333] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a6a694b-bcd0-45de-8638-a47d25c4f292 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.227793] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1618.227793] env[63297]: value = "task-1697984" [ 
1618.227793] env[63297]: _type = "Task" [ 1618.227793] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.238364] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697984, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.438911] env[63297]: DEBUG nova.compute.utils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1618.445023] env[63297]: DEBUG nova.compute.manager [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1618.445023] env[63297]: DEBUG nova.network.neutron [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1618.491824] env[63297]: DEBUG oslo_concurrency.lockutils [req-c5e60a4d-5ebd-4bc8-8318-3de04b465d91 req-bb1d3275-3e49-4f7e-8593-93624b388c68 service nova] Releasing lock "refresh_cache-fd178a30-b5f0-4019-a05f-f1928e1d122a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1618.562039] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ce8617-668b-1e9e-4c03-e9b22710d1cd, 'name': SearchDatastore_Task, 'duration_secs': 0.014878} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.563045] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1618.564178] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1618.565660] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.566034] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.566443] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1618.569018] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a223af8a-59f0-4985-a9ad-2cfdba4a3f29 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.571523] env[63297]: DEBUG nova.policy [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2d8413d4aad4ed8a1fa9e436de117ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc81b0f87c64b2283eb0ece21fb31a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1618.585149] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1618.585149] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None 
req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1618.585149] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3a3b523-8052-4089-b3cf-6a167983e51d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.594064] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1618.594064] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d89677-1c38-a7f1-f112-a44315fe87a6" [ 1618.594064] env[63297]: _type = "Task" [ 1618.594064] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.601224] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d89677-1c38-a7f1-f112-a44315fe87a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.641366] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2716a45a-4e43-4585-aaed-5a1a75c9e669 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.634s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.745364] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697984, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.749744] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.749744] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.933186] env[63297]: DEBUG nova.compute.manager [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Stashing vm_state: active {{(pid=63297) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1618.948582] env[63297]: DEBUG nova.compute.manager [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1618.966123] env[63297]: DEBUG nova.network.neutron [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Successfully created port: 67f99216-a730-4066-be67-21fcb4979776 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1619.105850] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d89677-1c38-a7f1-f112-a44315fe87a6, 'name': SearchDatastore_Task, 'duration_secs': 0.015458} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.107508] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ae84206-8b72-4d6f-90c2-238a1474ae61 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.114032] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1619.114032] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529fcce2-5824-c04e-bf1e-8ff5d68776fd" [ 1619.114032] env[63297]: _type = "Task" [ 1619.114032] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.126797] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529fcce2-5824-c04e-bf1e-8ff5d68776fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.240190] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697984, 'name': ReconfigVM_Task, 'duration_secs': 0.516613} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.243732] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1/427c4ff0-1bf1-4bfb-b5c6-de6659148ab1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1619.244824] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dceeab69-0ad7-400c-abf2-9f5510ebf3a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.251826] env[63297]: INFO nova.compute.manager [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Detaching volume 86accfd5-39e8-4bfb-a471-18e234f14803 [ 1619.253796] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1619.253796] env[63297]: value = "task-1697985" [ 1619.253796] env[63297]: _type = "Task" [ 1619.253796] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.266682] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697985, 'name': Rename_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.296189] env[63297]: INFO nova.virt.block_device [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Attempting to driver detach volume 86accfd5-39e8-4bfb-a471-18e234f14803 from mountpoint /dev/sdb [ 1619.296444] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Volume detach. 
Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1619.296632] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353972', 'volume_id': '86accfd5-39e8-4bfb-a471-18e234f14803', 'name': 'volume-86accfd5-39e8-4bfb-a471-18e234f14803', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b7b9f1b-d277-4219-92fb-e35a8b867e77', 'attached_at': '', 'detached_at': '', 'volume_id': '86accfd5-39e8-4bfb-a471-18e234f14803', 'serial': '86accfd5-39e8-4bfb-a471-18e234f14803'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1619.297542] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc2eaa5-5020-44ac-9e89-49c4536c9433 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.326083] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef06c385-0598-4910-bd52-a89a69acba37 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.328941] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be9eeec-eef3-4207-9502-35f139456b15 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.339635] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490d8690-61d0-4ece-9d8b-4dd89c5b80ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.343453] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c137b8c2-ed63-4568-ad87-20e527cbb479 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.391098] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4cdb35-9166-40d9-af4f-508a863ab8ca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.394633] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1934064-c435-44f7-9a68-918823340779 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.413314] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] The volume has not been displaced from its original location: [datastore1] volume-86accfd5-39e8-4bfb-a471-18e234f14803/volume-86accfd5-39e8-4bfb-a471-18e234f14803.vmdk. No consolidation needed. 
{{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1619.418814] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Reconfiguring VM instance instance-00000052 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1619.419199] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a626c689-0c7a-4d36-84e4-45de7818f3d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.434221] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f356f8-8b0c-4ebf-9ced-ed5301a3dc81 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.451305] env[63297]: DEBUG nova.compute.provider_tree [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1619.454114] env[63297]: DEBUG oslo_vmware.api [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1619.454114] env[63297]: value = "task-1697986" [ 1619.454114] env[63297]: _type = "Task" [ 1619.454114] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.466203] env[63297]: DEBUG oslo_vmware.api [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697986, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.466861] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.628545] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529fcce2-5824-c04e-bf1e-8ff5d68776fd, 'name': SearchDatastore_Task, 'duration_secs': 0.037128} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.629029] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.629124] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] fd178a30-b5f0-4019-a05f-f1928e1d122a/fd178a30-b5f0-4019-a05f-f1928e1d122a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1619.629468] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3aa3966-2d10-47a4-9941-cc6d826c17ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.637985] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1619.637985] env[63297]: value = "task-1697987" [ 1619.637985] env[63297]: _type = "Task" [ 1619.637985] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.650052] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697987, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.764451] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697985, 'name': Rename_Task, 'duration_secs': 0.20558} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.764791] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1619.765171] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08377a75-aeed-4330-a7b8-cb43333c3380 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.773420] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1619.773420] env[63297]: value = "task-1697988" [ 1619.773420] env[63297]: _type = "Task" [ 1619.773420] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.788509] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697988, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.956037] env[63297]: DEBUG nova.scheduler.client.report [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1619.965996] env[63297]: DEBUG nova.compute.manager [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1619.976555] env[63297]: DEBUG oslo_vmware.api [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697986, 'name': ReconfigVM_Task, 'duration_secs': 0.346888} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.976851] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Reconfigured VM instance instance-00000052 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1619.981927] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59343020-8d8c-4231-8d78-043b6c799282 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.003382] env[63297]: DEBUG oslo_vmware.api [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1620.003382] env[63297]: value = "task-1697989" [ 1620.003382] env[63297]: _type = "Task" [ 1620.003382] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.005916] env[63297]: DEBUG nova.virt.hardware [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1620.006072] env[63297]: DEBUG nova.virt.hardware [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1620.006226] env[63297]: DEBUG nova.virt.hardware [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1620.006409] env[63297]: DEBUG nova.virt.hardware [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1620.006555] env[63297]: DEBUG nova.virt.hardware [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1620.006700] env[63297]: DEBUG nova.virt.hardware [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1620.006906] env[63297]: DEBUG nova.virt.hardware [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1620.007623] env[63297]: DEBUG nova.virt.hardware [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1620.007623] env[63297]: DEBUG nova.virt.hardware [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1620.007623] env[63297]: DEBUG nova.virt.hardware [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1620.007623] env[63297]: DEBUG nova.virt.hardware [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1620.008938] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd56cd0-5d7f-4ed7-9da8-49c602ba9967 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.023269] env[63297]: DEBUG oslo_vmware.api [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697989, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.024724] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ca061f-0529-4eff-b68f-82cd3ecbb37b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.149309] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697987, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.285725] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697988, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.307903] env[63297]: DEBUG oslo_vmware.rw_handles [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dd5a28-2d6f-8fb3-43d7-81ef2d6e81ab/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1620.308895] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75cfb696-3fbc-4d43-9870-8f718f2b46bb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.315951] env[63297]: DEBUG oslo_vmware.rw_handles [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dd5a28-2d6f-8fb3-43d7-81ef2d6e81ab/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1620.315951] env[63297]: ERROR oslo_vmware.rw_handles [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dd5a28-2d6f-8fb3-43d7-81ef2d6e81ab/disk-0.vmdk due to incomplete transfer. [ 1620.316202] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-71e0c8f1-9753-42b8-84bf-e8efbbdb9e1d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.325477] env[63297]: DEBUG oslo_vmware.rw_handles [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dd5a28-2d6f-8fb3-43d7-81ef2d6e81ab/disk-0.vmdk. 
{{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1620.325660] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Uploaded image a8c08d8e-6819-4c48-bb60-faff753cbe12 to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1620.328278] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1620.328559] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6c4467a5-4a4e-4694-a55e-9b6b1dd41b0b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.335755] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1620.335755] env[63297]: value = "task-1697990" [ 1620.335755] env[63297]: _type = "Task" [ 1620.335755] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.347155] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697990, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.466362] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.534s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.466991] env[63297]: DEBUG nova.compute.manager [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1620.469832] env[63297]: DEBUG oslo_concurrency.lockutils [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.173s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.470165] env[63297]: DEBUG nova.objects.instance [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lazy-loading 'resources' on Instance uuid 44f4776e-d4a1-40ad-a03b-bb03582b95bd {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1620.521960] env[63297]: DEBUG oslo_vmware.api [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697989, 'name': ReconfigVM_Task, 'duration_secs': 0.304673} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.522454] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353972', 'volume_id': '86accfd5-39e8-4bfb-a471-18e234f14803', 'name': 'volume-86accfd5-39e8-4bfb-a471-18e234f14803', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b7b9f1b-d277-4219-92fb-e35a8b867e77', 'attached_at': '', 'detached_at': '', 'volume_id': '86accfd5-39e8-4bfb-a471-18e234f14803', 'serial': '86accfd5-39e8-4bfb-a471-18e234f14803'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1620.544341] env[63297]: DEBUG nova.compute.manager [req-094e296f-8c60-4523-8279-5afd61ed819b req-0a167674-8b3b-4848-acb5-f66c3c91f363 service nova] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Received event network-vif-plugged-67f99216-a730-4066-be67-21fcb4979776 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1620.544878] env[63297]: DEBUG oslo_concurrency.lockutils [req-094e296f-8c60-4523-8279-5afd61ed819b req-0a167674-8b3b-4848-acb5-f66c3c91f363 service nova] Acquiring lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.545493] env[63297]: DEBUG oslo_concurrency.lockutils [req-094e296f-8c60-4523-8279-5afd61ed819b req-0a167674-8b3b-4848-acb5-f66c3c91f363 service nova] Lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.545493] env[63297]: DEBUG oslo_concurrency.lockutils [req-094e296f-8c60-4523-8279-5afd61ed819b req-0a167674-8b3b-4848-acb5-f66c3c91f363 service nova] Lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.545493] env[63297]: DEBUG nova.compute.manager [req-094e296f-8c60-4523-8279-5afd61ed819b req-0a167674-8b3b-4848-acb5-f66c3c91f363 service nova] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] No waiting events found dispatching network-vif-plugged-67f99216-a730-4066-be67-21fcb4979776 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1620.545976] env[63297]: WARNING nova.compute.manager [req-094e296f-8c60-4523-8279-5afd61ed819b req-0a167674-8b3b-4848-acb5-f66c3c91f363 service nova] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Received unexpected event network-vif-plugged-67f99216-a730-4066-be67-21fcb4979776 for instance with vm_state building and task_state spawning. [ 1620.650759] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697987, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.589584} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.651182] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] fd178a30-b5f0-4019-a05f-f1928e1d122a/fd178a30-b5f0-4019-a05f-f1928e1d122a.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1620.651447] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1620.651740] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9912678f-89a2-4559-9270-451bd6c5b689 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.662021] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1620.662021] env[63297]: value = "task-1697991" [ 1620.662021] env[63297]: _type = "Task" [ 1620.662021] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.666851] env[63297]: DEBUG nova.network.neutron [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Successfully updated port: 67f99216-a730-4066-be67-21fcb4979776 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1620.675462] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697991, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.786350] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697988, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.845876] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697990, 'name': Destroy_Task, 'duration_secs': 0.377133} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.846079] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Destroyed the VM [ 1620.846301] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1620.846555] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1046f0ea-5f1a-4052-abdb-520660a2dc14 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.853994] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1620.853994] env[63297]: value = "task-1697992" [ 1620.853994] env[63297]: _type = "Task" [ 1620.853994] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.862184] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697992, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.973783] env[63297]: DEBUG nova.compute.utils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1620.978343] env[63297]: DEBUG nova.compute.manager [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1620.978595] env[63297]: DEBUG nova.network.neutron [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1621.030276] env[63297]: DEBUG nova.policy [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43ab498375eb47a3923ac10343c11d34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d5cb4b4799b4b8b99648e718dbc0254', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1621.069560] env[63297]: DEBUG nova.objects.instance [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lazy-loading 'flavor' on Instance uuid 0b7b9f1b-d277-4219-92fb-e35a8b867e77 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1621.170763] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "refresh_cache-1110d6ca-ca5f-44d1-baca-c22c8fc166b5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.170763] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "refresh_cache-1110d6ca-ca5f-44d1-baca-c22c8fc166b5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.170763] env[63297]: DEBUG nova.network.neutron [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1621.177061] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697991, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07433} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.177061] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1621.177061] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c602a3b4-77f8-4b60-a618-84adec4af711 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.199605] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] fd178a30-b5f0-4019-a05f-f1928e1d122a/fd178a30-b5f0-4019-a05f-f1928e1d122a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1621.203955] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58893e01-561d-496b-8e76-79c658d70add {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.226460] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1621.226460] env[63297]: value = "task-1697993" [ 1621.226460] env[63297]: _type = "Task" [ 1621.226460] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.243377] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697993, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.287188] env[63297]: DEBUG oslo_vmware.api [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1697988, 'name': PowerOnVM_Task, 'duration_secs': 1.225716} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.287609] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1621.287914] env[63297]: INFO nova.compute.manager [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Took 10.59 seconds to spawn the instance on the hypervisor. 
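
The records throughout this section follow oslo.vmware's task-polling pattern: wait_for_task (api.py:397) blocks on a vCenter task while _poll_task (api.py:434) repeatedly logs "progress is N%" until the task reports "completed successfully" with a duration. The sketch below is an illustrative, simplified polling loop written to mirror that log output; it is not the oslo.vmware implementation, and the get_task_info callable, its return format, and the poll interval are assumptions for the example.

    # Illustrative sketch only -- not the oslo.vmware source.
    # Mimics the wait_for_task/_poll_task behaviour visible in the log:
    # poll a vCenter task, report progress, and return once it succeeds.
    import time

    def wait_for_task(get_task_info, task_id, poll_interval=0.5):
        # get_task_info is a hypothetical callable returning a dict such as
        # {'state': 'running', 'progress': 51} or
        # {'state': 'success', 'duration_secs': 0.59}
        while True:
            info = get_task_info(task_id)
            if info['state'] == 'success':
                print(f"Task: {{'id': {task_id}}} completed successfully.")
                return info
            if info['state'] == 'error':
                raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
            print(f"Task: {{'id': {task_id}}} progress is {info.get('progress', 0)}%.")
            time.sleep(poll_interval)

In the real driver the polling is driven by the session object (e.g. the ReconfigVM_Task, PowerOnVM_Task and CopyVirtualDisk_Task entries above), and errors abort the calling operation rather than raising a bare RuntimeError.
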
[ 1621.288360] env[63297]: DEBUG nova.compute.manager [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1621.289254] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92265e41-9643-4ae9-bc6a-a1c77052b614 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.310250] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353de8f7-4f38-4a4c-83b7-4e203ec17163 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.319580] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9366a49b-3e4e-4331-ab80-5360d43e5f6d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.350252] env[63297]: DEBUG nova.network.neutron [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Successfully created port: dcb70072-f858-420c-861b-1f6f17a1615b {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1621.352703] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a39849-5cf3-4061-818f-beb28c4b4884 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.365123] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697992, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.368017] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7117029-a7c7-40dc-b804-8b39334b3c65 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.382862] env[63297]: DEBUG nova.compute.provider_tree [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1621.479817] env[63297]: DEBUG nova.compute.manager [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1621.709616] env[63297]: DEBUG nova.network.neutron [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1621.736909] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697993, 'name': ReconfigVM_Task, 'duration_secs': 0.319541} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.736909] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Reconfigured VM instance instance-0000005f to attach disk [datastore1] fd178a30-b5f0-4019-a05f-f1928e1d122a/fd178a30-b5f0-4019-a05f-f1928e1d122a.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1621.737320] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a704b4bb-7046-469c-9a82-edb7b7fa4113 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.745399] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1621.745399] env[63297]: value = "task-1697994" [ 1621.745399] env[63297]: _type = "Task" [ 1621.745399] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.755806] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697994, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.815590] env[63297]: INFO nova.compute.manager [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Took 26.11 seconds to build instance. [ 1621.870979] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697992, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.886174] env[63297]: DEBUG nova.scheduler.client.report [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1621.910870] env[63297]: DEBUG nova.network.neutron [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Updating instance_info_cache with network_info: [{"id": "67f99216-a730-4066-be67-21fcb4979776", "address": "fa:16:3e:8c:58:12", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67f99216-a7", "ovs_interfaceid": "67f99216-a730-4066-be67-21fcb4979776", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.039175] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.079065] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ec639314-a004-48bb-bad3-db36e9480b4e tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.330s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.080274] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" 
acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.041s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.257291] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697994, 'name': Rename_Task, 'duration_secs': 0.14763} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.257493] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1622.257767] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50f15e23-c04d-4af9-ba3d-59c7a7218af6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.265551] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1622.265551] env[63297]: value = "task-1697995" [ 1622.265551] env[63297]: _type = "Task" [ 1622.265551] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.278243] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697995, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.317950] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8eb19609-5d5e-4128-bdf2-b7752754c4d1 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.627s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.366363] env[63297]: DEBUG oslo_vmware.api [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1697992, 'name': RemoveSnapshot_Task, 'duration_secs': 1.442156} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.366649] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1622.366916] env[63297]: INFO nova.compute.manager [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Took 14.66 seconds to snapshot the instance on the hypervisor. [ 1622.391584] env[63297]: DEBUG oslo_concurrency.lockutils [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.922s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.394534] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.321s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.394881] env[63297]: DEBUG nova.objects.instance [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lazy-loading 'resources' on Instance uuid b261c90f-642d-42b7-8b79-d87eeaf0537a {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1622.412583] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "refresh_cache-1110d6ca-ca5f-44d1-baca-c22c8fc166b5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.412909] env[63297]: DEBUG nova.compute.manager [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Instance network_info: |[{"id": "67f99216-a730-4066-be67-21fcb4979776", "address": "fa:16:3e:8c:58:12", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": 
"nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67f99216-a7", "ovs_interfaceid": "67f99216-a730-4066-be67-21fcb4979776", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1622.414919] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:58:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fc48e29b-113c-4849-850c-35435eab4052', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67f99216-a730-4066-be67-21fcb4979776', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1622.428020] env[63297]: DEBUG oslo.service.loopingcall [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1622.430718] env[63297]: INFO nova.scheduler.client.report [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleted allocations for instance 44f4776e-d4a1-40ad-a03b-bb03582b95bd [ 1622.432341] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1622.435116] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8decdfd0-8e2e-4a4c-b937-2a6d411cccdd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.456704] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1622.456704] env[63297]: value = "task-1697996" [ 1622.456704] env[63297]: _type = "Task" [ 1622.456704] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.465797] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697996, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.493390] env[63297]: DEBUG nova.compute.manager [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1622.521961] env[63297]: DEBUG nova.compute.manager [req-ee01cdd8-d60f-4def-b11c-a31e5ee48ce5 req-1f521bfb-2231-476e-b52b-993f83aaebfe service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Received event network-changed-bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1622.521961] env[63297]: DEBUG nova.compute.manager [req-ee01cdd8-d60f-4def-b11c-a31e5ee48ce5 req-1f521bfb-2231-476e-b52b-993f83aaebfe service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Refreshing instance network info cache due to event network-changed-bb862c99-f006-416a-9b98-0fb287a5d194. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1622.523151] env[63297]: DEBUG oslo_concurrency.lockutils [req-ee01cdd8-d60f-4def-b11c-a31e5ee48ce5 req-1f521bfb-2231-476e-b52b-993f83aaebfe service nova] Acquiring lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.523151] env[63297]: DEBUG oslo_concurrency.lockutils [req-ee01cdd8-d60f-4def-b11c-a31e5ee48ce5 req-1f521bfb-2231-476e-b52b-993f83aaebfe service nova] Acquired lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.523151] env[63297]: DEBUG nova.network.neutron [req-ee01cdd8-d60f-4def-b11c-a31e5ee48ce5 req-1f521bfb-2231-476e-b52b-993f83aaebfe service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Refreshing network info cache for port bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1622.527301] env[63297]: DEBUG nova.virt.hardware [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1622.527301] env[63297]: DEBUG nova.virt.hardware [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1622.527301] env[63297]: DEBUG nova.virt.hardware [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1622.527536] env[63297]: DEBUG nova.virt.hardware [None 
req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1622.528530] env[63297]: DEBUG nova.virt.hardware [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1622.528530] env[63297]: DEBUG nova.virt.hardware [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1622.528530] env[63297]: DEBUG nova.virt.hardware [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1622.528530] env[63297]: DEBUG nova.virt.hardware [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1622.528530] env[63297]: DEBUG nova.virt.hardware [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1622.528759] env[63297]: DEBUG nova.virt.hardware [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1622.528759] env[63297]: DEBUG nova.virt.hardware [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1622.530484] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a177db-64bb-42cd-bff8-35054b7857c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.541097] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6645a199-4888-49bf-9285-eba11ed70edc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.574781] env[63297]: DEBUG nova.compute.manager [req-578c4c30-e0c0-4e25-a1d1-9674058f0d5b req-031efe2b-3a5f-4e04-8f68-cee9a5e371b0 service nova] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Received event network-changed-67f99216-a730-4066-be67-21fcb4979776 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1622.575025] env[63297]: DEBUG nova.compute.manager [req-578c4c30-e0c0-4e25-a1d1-9674058f0d5b 
req-031efe2b-3a5f-4e04-8f68-cee9a5e371b0 service nova] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Refreshing instance network info cache due to event network-changed-67f99216-a730-4066-be67-21fcb4979776. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1622.575267] env[63297]: DEBUG oslo_concurrency.lockutils [req-578c4c30-e0c0-4e25-a1d1-9674058f0d5b req-031efe2b-3a5f-4e04-8f68-cee9a5e371b0 service nova] Acquiring lock "refresh_cache-1110d6ca-ca5f-44d1-baca-c22c8fc166b5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.575445] env[63297]: DEBUG oslo_concurrency.lockutils [req-578c4c30-e0c0-4e25-a1d1-9674058f0d5b req-031efe2b-3a5f-4e04-8f68-cee9a5e371b0 service nova] Acquired lock "refresh_cache-1110d6ca-ca5f-44d1-baca-c22c8fc166b5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.575629] env[63297]: DEBUG nova.network.neutron [req-578c4c30-e0c0-4e25-a1d1-9674058f0d5b req-031efe2b-3a5f-4e04-8f68-cee9a5e371b0 service nova] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Refreshing network info cache for port 67f99216-a730-4066-be67-21fcb4979776 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1622.582474] env[63297]: INFO nova.compute.manager [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Detaching volume 2af25825-cee7-42ea-abc3-4b9187975f98 [ 1622.621888] env[63297]: INFO nova.virt.block_device [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Attempting to driver detach volume 2af25825-cee7-42ea-abc3-4b9187975f98 from mountpoint /dev/sdc [ 1622.622474] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Volume detach. 
Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1622.622474] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353976', 'volume_id': '2af25825-cee7-42ea-abc3-4b9187975f98', 'name': 'volume-2af25825-cee7-42ea-abc3-4b9187975f98', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b7b9f1b-d277-4219-92fb-e35a8b867e77', 'attached_at': '', 'detached_at': '', 'volume_id': '2af25825-cee7-42ea-abc3-4b9187975f98', 'serial': '2af25825-cee7-42ea-abc3-4b9187975f98'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1622.623709] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765cd323-d7e3-4195-9083-7688acf2f5c9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.648870] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b87fbde-e9b2-4385-86ed-2ae336d2424e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.661600] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bea658c-64bf-47c7-af81-506bc621e4e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.685953] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5eeaa02-633a-451a-9c89-ea971a438855 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.707457] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] The volume has not been displaced from its original location: [datastore1] volume-2af25825-cee7-42ea-abc3-4b9187975f98/volume-2af25825-cee7-42ea-abc3-4b9187975f98.vmdk. No consolidation needed. 
{{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1622.715789] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Reconfiguring VM instance instance-00000052 to detach disk 2002 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1622.716816] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c606dd9-f5f2-4240-9947-6f50c0336479 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.739672] env[63297]: DEBUG oslo_vmware.api [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1622.739672] env[63297]: value = "task-1697997" [ 1622.739672] env[63297]: _type = "Task" [ 1622.739672] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.758479] env[63297]: DEBUG oslo_vmware.api [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697997, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.777744] env[63297]: DEBUG oslo_vmware.api [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1697995, 'name': PowerOnVM_Task, 'duration_secs': 0.482942} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.777744] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1622.778023] env[63297]: INFO nova.compute.manager [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Took 7.31 seconds to spawn the instance on the hypervisor. 
[ 1622.778023] env[63297]: DEBUG nova.compute.manager [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1622.778964] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d633adf-b1a7-4dcd-85f2-f6f776839f7a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.915144] env[63297]: DEBUG nova.compute.manager [None req-a86af03b-dbf0-404c-86f9-1563f536ac9f tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Found 2 images (rotation: 2) {{(pid=63297) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1622.954023] env[63297]: DEBUG oslo_concurrency.lockutils [None req-33044c38-ccc5-45fe-b032-6d191219cca7 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "44f4776e-d4a1-40ad-a03b-bb03582b95bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.795s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.976514] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1697996, 'name': CreateVM_Task, 'duration_secs': 0.394515} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.977439] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1622.980168] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.980168] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.980168] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1622.980168] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aececd60-c3bb-4782-bb14-bccd41e57f36 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.990818] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 
tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1622.990818] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52da3d5c-a7ed-4231-7961-1c271c52ae74" [ 1622.990818] env[63297]: _type = "Task" [ 1622.990818] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.999465] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52da3d5c-a7ed-4231-7961-1c271c52ae74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.137134] env[63297]: DEBUG nova.network.neutron [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Successfully updated port: dcb70072-f858-420c-861b-1f6f17a1615b {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1623.250323] env[63297]: DEBUG oslo_vmware.api [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697997, 'name': ReconfigVM_Task, 'duration_secs': 0.349029} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.250676] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Reconfigured VM instance instance-00000052 to detach disk 2002 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1623.259675] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-463e3b7d-c112-45ba-a9f8-cd5f7afb05ca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.279170] env[63297]: DEBUG oslo_vmware.api [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1623.279170] env[63297]: value = "task-1697998" [ 1623.279170] env[63297]: _type = "Task" [ 1623.279170] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.292893] env[63297]: DEBUG oslo_vmware.api [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697998, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.298855] env[63297]: INFO nova.compute.manager [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Took 20.98 seconds to build instance. 
[ 1623.301846] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7e3482-adfa-4a2b-80ac-a6129d696259 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.310738] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084b355a-ea5f-43d1-8630-dfa24923a76a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.346107] env[63297]: DEBUG nova.network.neutron [req-ee01cdd8-d60f-4def-b11c-a31e5ee48ce5 req-1f521bfb-2231-476e-b52b-993f83aaebfe service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updated VIF entry in instance network info cache for port bb862c99-f006-416a-9b98-0fb287a5d194. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1623.346474] env[63297]: DEBUG nova.network.neutron [req-ee01cdd8-d60f-4def-b11c-a31e5ee48ce5 req-1f521bfb-2231-476e-b52b-993f83aaebfe service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updating instance_info_cache with network_info: [{"id": "bb862c99-f006-416a-9b98-0fb287a5d194", "address": "fa:16:3e:03:4f:8d", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb862c99-f0", "ovs_interfaceid": "bb862c99-f006-416a-9b98-0fb287a5d194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.348728] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541c0926-a340-4620-94a1-5d5cf74318d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.358961] env[63297]: DEBUG nova.network.neutron [req-578c4c30-e0c0-4e25-a1d1-9674058f0d5b req-031efe2b-3a5f-4e04-8f68-cee9a5e371b0 service nova] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Updated VIF entry in instance network info cache for port 67f99216-a730-4066-be67-21fcb4979776. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1623.360911] env[63297]: DEBUG nova.network.neutron [req-578c4c30-e0c0-4e25-a1d1-9674058f0d5b req-031efe2b-3a5f-4e04-8f68-cee9a5e371b0 service nova] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Updating instance_info_cache with network_info: [{"id": "67f99216-a730-4066-be67-21fcb4979776", "address": "fa:16:3e:8c:58:12", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67f99216-a7", "ovs_interfaceid": "67f99216-a730-4066-be67-21fcb4979776", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.364029] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef97fa4-0f6c-4e6e-9e4d-1197e43dbcb2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.379680] env[63297]: DEBUG nova.compute.provider_tree [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1623.500399] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52da3d5c-a7ed-4231-7961-1c271c52ae74, 'name': SearchDatastore_Task, 'duration_secs': 0.013114} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.501852] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.502113] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1623.502369] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.502621] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.502760] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1623.505359] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b72639dd-193e-4e27-adfd-1a1c802b6782 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.516357] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1623.516626] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1623.518314] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0716520-91cd-4c28-99b3-7791da212e67 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.526028] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1623.526028] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52725130-bf8d-ecf9-58fc-4f3abb3b5fef" [ 1623.526028] env[63297]: _type = "Task" [ 1623.526028] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.535306] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52725130-bf8d-ecf9-58fc-4f3abb3b5fef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.641023] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "refresh_cache-617544f0-fa53-415d-9f00-c8143e8e25b1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.641023] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "refresh_cache-617544f0-fa53-415d-9f00-c8143e8e25b1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.641023] env[63297]: DEBUG nova.network.neutron [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1623.790405] env[63297]: DEBUG oslo_vmware.api [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1697998, 'name': ReconfigVM_Task, 'duration_secs': 0.229167} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.790707] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353976', 'volume_id': '2af25825-cee7-42ea-abc3-4b9187975f98', 'name': 'volume-2af25825-cee7-42ea-abc3-4b9187975f98', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b7b9f1b-d277-4219-92fb-e35a8b867e77', 'attached_at': '', 'detached_at': '', 'volume_id': '2af25825-cee7-42ea-abc3-4b9187975f98', 'serial': '2af25825-cee7-42ea-abc3-4b9187975f98'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1623.806071] env[63297]: DEBUG oslo_concurrency.lockutils [None req-dff26f16-ca32-4033-afa1-73c7bef73c85 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "fd178a30-b5f0-4019-a05f-f1928e1d122a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.496s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.852933] env[63297]: DEBUG oslo_concurrency.lockutils [req-ee01cdd8-d60f-4def-b11c-a31e5ee48ce5 req-1f521bfb-2231-476e-b52b-993f83aaebfe service nova] Releasing lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.870199] env[63297]: DEBUG oslo_concurrency.lockutils [req-578c4c30-e0c0-4e25-a1d1-9674058f0d5b req-031efe2b-3a5f-4e04-8f68-cee9a5e371b0 service nova] Releasing lock "refresh_cache-1110d6ca-ca5f-44d1-baca-c22c8fc166b5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.883127] env[63297]: DEBUG nova.scheduler.client.report [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1624.008473] env[63297]: DEBUG nova.compute.manager [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1624.009400] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e8d78b-3f22-4123-8a6a-b719107a891f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.037574] env[63297]: DEBUG oslo_vmware.api [None 
req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52725130-bf8d-ecf9-58fc-4f3abb3b5fef, 'name': SearchDatastore_Task, 'duration_secs': 0.011426} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.038460] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd055554-7f28-45e6-a833-e89a6cfe55d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.044461] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1624.044461] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5274183e-65c6-5464-0362-484e3300b710" [ 1624.044461] env[63297]: _type = "Task" [ 1624.044461] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.055311] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5274183e-65c6-5464-0362-484e3300b710, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.171799] env[63297]: DEBUG nova.network.neutron [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1624.248301] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8847bc09-2bab-4baf-a33d-e235dae28116 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "fd178a30-b5f0-4019-a05f-f1928e1d122a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.248550] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8847bc09-2bab-4baf-a33d-e235dae28116 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "fd178a30-b5f0-4019-a05f-f1928e1d122a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.248733] env[63297]: DEBUG nova.compute.manager [None req-8847bc09-2bab-4baf-a33d-e235dae28116 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1624.249592] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1eeb251-a41c-407c-b42d-af1882d6e008 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.257287] env[63297]: DEBUG nova.compute.manager [None req-8847bc09-2bab-4baf-a33d-e235dae28116 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63297) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1624.257838] env[63297]: DEBUG nova.objects.instance [None req-8847bc09-2bab-4baf-a33d-e235dae28116 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lazy-loading 'flavor' on Instance uuid fd178a30-b5f0-4019-a05f-f1928e1d122a {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1624.336014] env[63297]: DEBUG nova.objects.instance [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lazy-loading 'flavor' on Instance uuid 0b7b9f1b-d277-4219-92fb-e35a8b867e77 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1624.346122] env[63297]: DEBUG nova.network.neutron [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Updating instance_info_cache with network_info: [{"id": "dcb70072-f858-420c-861b-1f6f17a1615b", "address": "fa:16:3e:04:7b:da", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcb70072-f8", "ovs_interfaceid": "dcb70072-f858-420c-861b-1f6f17a1615b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.388957] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.994s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.391408] env[63297]: DEBUG oslo_concurrency.lockutils [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.593s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.391653] env[63297]: DEBUG nova.objects.instance [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Lazy-loading 'resources' on Instance uuid 6c1aa85a-ee37-461b-ad8a-7fbb525e836e {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1624.414024] env[63297]: INFO nova.scheduler.client.report [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Deleted allocations for instance b261c90f-642d-42b7-8b79-d87eeaf0537a [ 1624.520553] env[63297]: INFO nova.compute.manager [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] instance snapshotting [ 1624.521165] env[63297]: DEBUG nova.objects.instance [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'flavor' on Instance uuid b1ed5d76-d358-49d3-a854-8f968bc987ad {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1624.556132] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5274183e-65c6-5464-0362-484e3300b710, 'name': SearchDatastore_Task, 'duration_secs': 0.024051} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.556466] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.556746] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 1110d6ca-ca5f-44d1-baca-c22c8fc166b5/1110d6ca-ca5f-44d1-baca-c22c8fc166b5.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1624.557016] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97a299b3-b201-4e54-ba06-22a211235b07 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.565104] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1624.565104] env[63297]: value = "task-1697999" [ 1624.565104] env[63297]: _type = "Task" [ 1624.565104] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.574651] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697999, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.604858] env[63297]: DEBUG nova.compute.manager [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Received event network-vif-plugged-dcb70072-f858-420c-861b-1f6f17a1615b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1624.605135] env[63297]: DEBUG oslo_concurrency.lockutils [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] Acquiring lock "617544f0-fa53-415d-9f00-c8143e8e25b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.605345] env[63297]: DEBUG oslo_concurrency.lockutils [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] Lock "617544f0-fa53-415d-9f00-c8143e8e25b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.605516] env[63297]: DEBUG oslo_concurrency.lockutils [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] Lock "617544f0-fa53-415d-9f00-c8143e8e25b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.605681] env[63297]: DEBUG nova.compute.manager [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] No waiting events found dispatching network-vif-plugged-dcb70072-f858-420c-861b-1f6f17a1615b {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1624.605840] env[63297]: WARNING nova.compute.manager [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Received unexpected event network-vif-plugged-dcb70072-f858-420c-861b-1f6f17a1615b for instance with vm_state building and task_state spawning. [ 1624.605994] env[63297]: DEBUG nova.compute.manager [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Received event network-changed-dcb70072-f858-420c-861b-1f6f17a1615b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1624.606162] env[63297]: DEBUG nova.compute.manager [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Refreshing instance network info cache due to event network-changed-dcb70072-f858-420c-861b-1f6f17a1615b. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1624.606323] env[63297]: DEBUG oslo_concurrency.lockutils [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] Acquiring lock "refresh_cache-617544f0-fa53-415d-9f00-c8143e8e25b1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.763390] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8847bc09-2bab-4baf-a33d-e235dae28116 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1624.763576] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93ebf689-1400-4abe-908d-25dbffc8f651 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.773349] env[63297]: DEBUG oslo_vmware.api [None req-8847bc09-2bab-4baf-a33d-e235dae28116 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1624.773349] env[63297]: value = "task-1698000" [ 1624.773349] env[63297]: _type = "Task" [ 1624.773349] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.783320] env[63297]: DEBUG oslo_vmware.api [None req-8847bc09-2bab-4baf-a33d-e235dae28116 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698000, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.848083] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "refresh_cache-617544f0-fa53-415d-9f00-c8143e8e25b1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.848482] env[63297]: DEBUG nova.compute.manager [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Instance network_info: |[{"id": "dcb70072-f858-420c-861b-1f6f17a1615b", "address": "fa:16:3e:04:7b:da", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcb70072-f8", "ovs_interfaceid": "dcb70072-f858-420c-861b-1f6f17a1615b", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1624.848778] env[63297]: DEBUG oslo_concurrency.lockutils [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] Acquired lock "refresh_cache-617544f0-fa53-415d-9f00-c8143e8e25b1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.848971] env[63297]: DEBUG nova.network.neutron [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Refreshing network info cache for port dcb70072-f858-420c-861b-1f6f17a1615b {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1624.850160] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:7b:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dcb70072-f858-420c-861b-1f6f17a1615b', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1624.858361] env[63297]: DEBUG oslo.service.loopingcall [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1624.859516] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1624.859738] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7d7356a-db85-4e7f-8304-faab31263b28 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.882882] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1624.882882] env[63297]: value = "task-1698001" [ 1624.882882] env[63297]: _type = "Task" [ 1624.882882] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.892371] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698001, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.924623] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685ff8c-b1d3-47d5-8377-3c4e7fc34c69 tempest-ServersWithSpecificFlavorTestJSON-1897186386 tempest-ServersWithSpecificFlavorTestJSON-1897186386-project-member] Lock "b261c90f-642d-42b7-8b79-d87eeaf0537a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.017s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.026402] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92282b4d-a81c-4e4c-a1a1-62a70572138d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.059701] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075c1efb-b57a-4bc1-8eb4-d150d08f2f27 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.088680] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697999, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.228349] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a805346a-01c0-4b16-8921-91661b8d9478 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.236489] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2626fab-d4f6-48a9-954c-324f5b628e1d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.268638] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276da305-4749-41d8-9878-b77f5ae1c1c8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.280734] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c6cc56-e6ca-47c6-8b28-3e2b69ed88f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.287598] env[63297]: DEBUG oslo_vmware.api [None req-8847bc09-2bab-4baf-a33d-e235dae28116 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698000, 'name': PowerOffVM_Task, 'duration_secs': 0.232161} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.288288] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8847bc09-2bab-4baf-a33d-e235dae28116 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1625.288475] env[63297]: DEBUG nova.compute.manager [None req-8847bc09-2bab-4baf-a33d-e235dae28116 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1625.289249] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a7a548-07ac-4b39-87a0-212e52b113e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.300169] env[63297]: DEBUG nova.compute.provider_tree [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1625.343112] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bbf7f45c-6efc-4e8d-87fc-237e179302f0 tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.263s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.394333] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698001, 'name': CreateVM_Task, 'duration_secs': 0.504104} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.394503] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1625.395142] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.395305] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.395752] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1625.396011] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e20ce144-5b21-458d-9d77-28e91ae7038b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.401243] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1625.401243] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52442cfe-e7fb-74cf-f1bd-1675a06920dc" [ 1625.401243] env[63297]: _type = "Task" [ 1625.401243] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.409498] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52442cfe-e7fb-74cf-f1bd-1675a06920dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.559609] env[63297]: DEBUG nova.network.neutron [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Updated VIF entry in instance network info cache for port dcb70072-f858-420c-861b-1f6f17a1615b. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1625.559986] env[63297]: DEBUG nova.network.neutron [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Updating instance_info_cache with network_info: [{"id": "dcb70072-f858-420c-861b-1f6f17a1615b", "address": "fa:16:3e:04:7b:da", "network": {"id": "c3008d10-e30b-4ec3-bdcf-a6eb5fc754ce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2018586182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d5cb4b4799b4b8b99648e718dbc0254", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcb70072-f8", "ovs_interfaceid": "dcb70072-f858-420c-861b-1f6f17a1615b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.579659] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1625.579958] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1697999, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.728008} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.580433] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d11f4174-3caf-42e8-865e-4e150e1a6774 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.582330] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 1110d6ca-ca5f-44d1-baca-c22c8fc166b5/1110d6ca-ca5f-44d1-baca-c22c8fc166b5.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1625.582532] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1625.582756] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-88e61ca9-e6b2-410e-947a-656c433d8091 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.590013] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1625.590013] env[63297]: value = "task-1698003" [ 1625.590013] env[63297]: _type = "Task" [ 1625.590013] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.591252] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1625.591252] env[63297]: value = "task-1698002" [ 1625.591252] env[63297]: _type = "Task" [ 1625.591252] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.603204] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698003, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.606517] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698002, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.803761] env[63297]: DEBUG nova.scheduler.client.report [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1625.816760] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8847bc09-2bab-4baf-a33d-e235dae28116 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "fd178a30-b5f0-4019-a05f-f1928e1d122a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.566s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.913334] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52442cfe-e7fb-74cf-f1bd-1675a06920dc, 'name': SearchDatastore_Task, 'duration_secs': 0.047242} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.913632] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.913858] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1625.914103] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.914250] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.914430] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 
tempest-ImagesTestJSON-256849719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1625.914769] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24d332ae-396f-4d5e-89fc-b82a0613de94 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.930265] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1625.931230] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1625.931230] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0625c71c-ee88-468a-a893-82beb90b785a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.937084] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1625.937084] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527714c7-a890-65cd-7b19-a5ed8d314f4b" [ 1625.937084] env[63297]: _type = "Task" [ 1625.937084] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.945557] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527714c7-a890-65cd-7b19-a5ed8d314f4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.063358] env[63297]: DEBUG oslo_concurrency.lockutils [req-a6c296f9-5234-41f7-b9bd-15ebc31a5f27 req-efd7b740-ffeb-48a9-ab51-9d319c639d3d service nova] Releasing lock "refresh_cache-617544f0-fa53-415d-9f00-c8143e8e25b1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.104025] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698003, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06489} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.107390] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1626.107706] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698002, 'name': CreateSnapshot_Task, 'duration_secs': 0.463717} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.108421] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde34ad6-459b-4c6e-9e03-1e17e16d3380 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.111228] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1626.111912] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7908d09b-f7c6-4ba9-a269-dec74fc69958 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.138803] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 1110d6ca-ca5f-44d1-baca-c22c8fc166b5/1110d6ca-ca5f-44d1-baca-c22c8fc166b5.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1626.141334] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7823fc15-1fa0-4770-bd6c-2300d8582845 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.163257] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1626.163257] env[63297]: value = "task-1698004" [ 1626.163257] env[63297]: _type = "Task" [ 1626.163257] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.172024] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698004, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.312294] env[63297]: DEBUG oslo_concurrency.lockutils [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.921s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.316666] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 14.944s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.316666] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.316666] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1626.316666] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.790s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.316666] env[63297]: DEBUG nova.objects.instance [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lazy-loading 'resources' on Instance uuid f9ad9854-2f5b-4edd-9636-8d36d0a89e89 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1626.318594] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817c8ac5-a928-4856-96f1-eac0f7e0154d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.329244] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8ee594-33db-4d39-98bb-7af8c0d171c7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.347325] env[63297]: INFO nova.scheduler.client.report [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Deleted allocations for instance 6c1aa85a-ee37-461b-ad8a-7fbb525e836e [ 1626.349259] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdaed0c7-f2dd-4fcc-8d44-23306c1dc24a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.359566] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-74b165b8-0b70-4569-a220-60aabcfc563b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.392549] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179087MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1626.392748] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.449130] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527714c7-a890-65cd-7b19-a5ed8d314f4b, 'name': SearchDatastore_Task, 'duration_secs': 0.015976} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.449958] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-372806f9-b00f-4298-b3b5-674516dc6c59 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.456691] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1626.456691] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5272e65f-d036-d35e-50a0-94623204fead" [ 1626.456691] env[63297]: _type = "Task" [ 1626.456691] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.467645] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5272e65f-d036-d35e-50a0-94623204fead, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.523762] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.524043] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.524262] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.524445] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.524610] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.526692] env[63297]: INFO nova.compute.manager [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Terminating instance [ 1626.531641] env[63297]: DEBUG nova.compute.manager [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1626.531641] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1626.531641] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae1e1a4-d465-409c-9edb-82d095055e68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.540070] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1626.540070] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80fcc478-8a2a-48c6-a4b6-f7a3372bfe89 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.547629] env[63297]: DEBUG oslo_vmware.api [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1626.547629] env[63297]: value = "task-1698005" [ 1626.547629] env[63297]: _type = "Task" [ 1626.547629] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.559139] env[63297]: DEBUG oslo_vmware.api [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1698005, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.663652] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1626.664086] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-79b4bd1a-fbcd-4f2f-aad6-40a5842ad342 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.681019] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698004, 'name': ReconfigVM_Task, 'duration_secs': 0.316616} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.681019] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 1110d6ca-ca5f-44d1-baca-c22c8fc166b5/1110d6ca-ca5f-44d1-baca-c22c8fc166b5.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1626.681019] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1626.681019] env[63297]: value = "task-1698006" [ 1626.681019] env[63297]: _type = "Task" [ 1626.681019] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.681019] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24abacdc-9981-4dc7-ad8d-09f0d2e84cc7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.691302] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698006, 'name': CloneVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.692915] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1626.692915] env[63297]: value = "task-1698007" [ 1626.692915] env[63297]: _type = "Task" [ 1626.692915] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.702068] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698007, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.858345] env[63297]: DEBUG oslo_concurrency.lockutils [None req-00e81dee-9332-4cb1-b8b8-61ee08889ffb tempest-ServerRescueTestJSONUnderV235-2002775099 tempest-ServerRescueTestJSONUnderV235-2002775099-project-member] Lock "6c1aa85a-ee37-461b-ad8a-7fbb525e836e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.952s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.970331] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5272e65f-d036-d35e-50a0-94623204fead, 'name': SearchDatastore_Task, 'duration_secs': 0.018785} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.970614] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.970864] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 617544f0-fa53-415d-9f00-c8143e8e25b1/617544f0-fa53-415d-9f00-c8143e8e25b1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1626.971131] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc9e0546-09e5-4474-b11e-8c564c6fd15a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.973247] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "fd178a30-b5f0-4019-a05f-f1928e1d122a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.973455] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "fd178a30-b5f0-4019-a05f-f1928e1d122a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.973645] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "fd178a30-b5f0-4019-a05f-f1928e1d122a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.973814] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "fd178a30-b5f0-4019-a05f-f1928e1d122a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.974016] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "fd178a30-b5f0-4019-a05f-f1928e1d122a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.976449] env[63297]: INFO nova.compute.manager [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Terminating instance [ 1626.979020] env[63297]: DEBUG nova.compute.manager [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1626.979226] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1626.980076] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e7eea0-7ea2-4006-a504-7fe587ccf421 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.985651] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1626.985651] env[63297]: value = "task-1698008" [ 1626.985651] env[63297]: _type = "Task" [ 1626.985651] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.994791] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1626.995484] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af979ee7-5b0c-4a30-805c-901220c2086a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.001264] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698008, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.060137] env[63297]: DEBUG oslo_vmware.api [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1698005, 'name': PowerOffVM_Task, 'duration_secs': 0.409594} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.063145] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1627.063341] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1627.063832] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7396c156-d39c-46eb-95c6-911e2b382e77 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.087997] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1627.088349] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1627.088854] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleting the datastore file [datastore1] fd178a30-b5f0-4019-a05f-f1928e1d122a {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1627.088854] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60163864-fc92-4557-a90d-5fa8b757845e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.102040] env[63297]: DEBUG oslo_vmware.api [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1627.102040] env[63297]: value = "task-1698011" [ 1627.102040] env[63297]: _type = "Task" [ 1627.102040] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.117071] env[63297]: DEBUG oslo_vmware.api [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698011, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.136710] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725be438-163a-4f32-b46b-1163f4b7ebfd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.145624] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63708518-422c-4cda-8699-7cd5c56f1a44 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.180554] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44dab699-3312-4e79-8001-1d2df3fe0845 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.193489] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698006, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.200471] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daecf5b8-afd8-4eb1-b100-595cd5968000 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.211953] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698007, 'name': Rename_Task, 'duration_secs': 0.359779} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.220765] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1627.221337] env[63297]: DEBUG nova.compute.provider_tree [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1627.222684] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3536132c-276a-401b-9cbc-1770a10f89f6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.231279] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1627.231279] env[63297]: value = "task-1698012" [ 1627.231279] env[63297]: _type = "Task" [ 1627.231279] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.241695] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698012, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.318381] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1627.318381] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1627.318381] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Deleting the datastore file [datastore1] 0b7b9f1b-d277-4219-92fb-e35a8b867e77 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1627.318381] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad90ca5a-19be-4188-b533-01a4d6c011e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.330347] env[63297]: DEBUG oslo_vmware.api [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for the task: (returnval){ [ 1627.330347] env[63297]: value = "task-1698013" [ 1627.330347] env[63297]: _type = "Task" [ 1627.330347] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.343079] env[63297]: DEBUG oslo_vmware.api [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1698013, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.499771] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698008, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.612991] env[63297]: DEBUG oslo_vmware.api [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698011, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.475101} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.613357] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1627.613489] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1627.613665] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1627.613840] env[63297]: INFO nova.compute.manager [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1627.614106] env[63297]: DEBUG oslo.service.loopingcall [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1627.614523] env[63297]: DEBUG nova.compute.manager [-] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1627.614523] env[63297]: DEBUG nova.network.neutron [-] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1627.694954] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698006, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.726268] env[63297]: DEBUG nova.scheduler.client.report [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1627.749224] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698012, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.845427] env[63297]: DEBUG oslo_vmware.api [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Task: {'id': task-1698013, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.419985} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.845757] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1627.846903] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1627.846903] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1627.846903] env[63297]: INFO nova.compute.manager [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Took 1.32 seconds to destroy the instance on the hypervisor. [ 1627.847194] env[63297]: DEBUG oslo.service.loopingcall [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1627.847414] env[63297]: DEBUG nova.compute.manager [-] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1627.847591] env[63297]: DEBUG nova.network.neutron [-] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1627.998252] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698008, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597672} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.998573] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 617544f0-fa53-415d-9f00-c8143e8e25b1/617544f0-fa53-415d-9f00-c8143e8e25b1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1627.998779] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1627.999049] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0aee0ba-815f-46b9-90c9-0037fce14c25 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.008350] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1628.008350] env[63297]: value = "task-1698014" [ 1628.008350] env[63297]: _type = "Task" [ 1628.008350] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.017646] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698014, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.194020] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698006, 'name': CloneVM_Task, 'duration_secs': 1.506537} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.198870] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Created linked-clone VM from snapshot [ 1628.199782] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b92c16-e1c4-42ca-8d24-cae72b035176 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.213492] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Uploading image 5bd2ea09-982b-4387-829f-2079c40f24c0 {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1628.234226] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.918s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.236376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 8.770s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.246324] env[63297]: DEBUG oslo_vmware.rw_handles [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1628.246324] env[63297]: value = "vm-353985" [ 1628.246324] env[63297]: _type = "VirtualMachine" [ 1628.246324] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1628.246699] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ece2221a-2c6f-4c13-83e6-12942bde5240 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.256538] env[63297]: DEBUG oslo_vmware.api [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698012, 'name': PowerOnVM_Task, 'duration_secs': 0.619897} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.261969] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1628.262393] env[63297]: INFO nova.compute.manager [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Took 8.30 seconds to spawn the instance on the hypervisor. [ 1628.262981] env[63297]: DEBUG nova.compute.manager [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1628.263555] env[63297]: DEBUG oslo_vmware.rw_handles [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lease: (returnval){ [ 1628.263555] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c1cbf9-0ed7-e692-7ba1-8aedaf4d1b97" [ 1628.263555] env[63297]: _type = "HttpNfcLease" [ 1628.263555] env[63297]: } obtained for exporting VM: (result){ [ 1628.263555] env[63297]: value = "vm-353985" [ 1628.263555] env[63297]: _type = "VirtualMachine" [ 1628.263555] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1628.263996] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the lease: (returnval){ [ 1628.263996] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c1cbf9-0ed7-e692-7ba1-8aedaf4d1b97" [ 1628.263996] env[63297]: _type = "HttpNfcLease" [ 1628.263996] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1628.265682] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4fa26a6-4aed-4577-a027-e0548d97652b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.269350] env[63297]: INFO nova.scheduler.client.report [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted allocations for instance f9ad9854-2f5b-4edd-9636-8d36d0a89e89 [ 1628.282775] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1628.282775] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c1cbf9-0ed7-e692-7ba1-8aedaf4d1b97" [ 1628.282775] env[63297]: _type = "HttpNfcLease" [ 1628.282775] env[63297]: } is initializing. 
{{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1628.331757] env[63297]: DEBUG nova.compute.manager [req-ef5b3c05-dd1b-4940-b005-1a3fe55721c3 req-afed147a-71bb-4c11-b08b-26c363ea0171 service nova] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Received event network-vif-deleted-b317a4c8-cd02-4515-9830-7536b71d4451 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1628.332017] env[63297]: INFO nova.compute.manager [req-ef5b3c05-dd1b-4940-b005-1a3fe55721c3 req-afed147a-71bb-4c11-b08b-26c363ea0171 service nova] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Neutron deleted interface b317a4c8-cd02-4515-9830-7536b71d4451; detaching it from the instance and deleting it from the info cache [ 1628.332932] env[63297]: DEBUG nova.network.neutron [req-ef5b3c05-dd1b-4940-b005-1a3fe55721c3 req-afed147a-71bb-4c11-b08b-26c363ea0171 service nova] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.524171] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698014, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070991} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.526349] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1628.528262] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081a06fb-ea0e-41f1-b18d-b733a9af3c75 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.555059] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 617544f0-fa53-415d-9f00-c8143e8e25b1/617544f0-fa53-415d-9f00-c8143e8e25b1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1628.557431] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27169f93-9373-40a0-8cd8-21e50efc7efe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.578360] env[63297]: DEBUG nova.compute.manager [req-ae74e7d7-6b84-4890-8945-cd0b5a8697fd req-57472b6a-4f1e-4a6f-abd9-8fdca7ec88bc service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Received event network-vif-deleted-287bdace-3df3-414a-8adb-2f8d720f8528 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1628.578595] env[63297]: INFO nova.compute.manager [req-ae74e7d7-6b84-4890-8945-cd0b5a8697fd req-57472b6a-4f1e-4a6f-abd9-8fdca7ec88bc service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Neutron deleted interface 287bdace-3df3-414a-8adb-2f8d720f8528; detaching it 
from the instance and deleting it from the info cache [ 1628.578763] env[63297]: DEBUG nova.network.neutron [req-ae74e7d7-6b84-4890-8945-cd0b5a8697fd req-57472b6a-4f1e-4a6f-abd9-8fdca7ec88bc service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.588258] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1628.588258] env[63297]: value = "task-1698016" [ 1628.588258] env[63297]: _type = "Task" [ 1628.588258] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.599941] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698016, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.747333] env[63297]: INFO nova.compute.claims [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1628.782827] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1628.782827] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c1cbf9-0ed7-e692-7ba1-8aedaf4d1b97" [ 1628.782827] env[63297]: _type = "HttpNfcLease" [ 1628.782827] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1628.783130] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1db124f-0077-4568-a511-73a5a338b56b tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "f9ad9854-2f5b-4edd-9636-8d36d0a89e89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.104s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.784312] env[63297]: DEBUG oslo_vmware.rw_handles [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1628.784312] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c1cbf9-0ed7-e692-7ba1-8aedaf4d1b97" [ 1628.784312] env[63297]: _type = "HttpNfcLease" [ 1628.784312] env[63297]: }. 
{{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1628.785889] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33463950-012c-4bb1-b3f9-744fe7c76344 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.799860] env[63297]: DEBUG nova.network.neutron [-] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.801515] env[63297]: INFO nova.compute.manager [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Took 22.07 seconds to build instance. [ 1628.806027] env[63297]: DEBUG oslo_vmware.rw_handles [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52811a3f-b62b-4fef-ad20-59f278c9c5b0/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1628.807418] env[63297]: DEBUG oslo_vmware.rw_handles [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52811a3f-b62b-4fef-ad20-59f278c9c5b0/disk-0.vmdk for reading. {{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1628.875245] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31831a36-6216-47bb-8590-345be13b6ec0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.889617] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2437bd34-70fa-4d02-89da-7dc6cafa34c5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.935690] env[63297]: DEBUG nova.compute.manager [req-ef5b3c05-dd1b-4940-b005-1a3fe55721c3 req-afed147a-71bb-4c11-b08b-26c363ea0171 service nova] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Detach interface failed, port_id=b317a4c8-cd02-4515-9830-7536b71d4451, reason: Instance fd178a30-b5f0-4019-a05f-f1928e1d122a could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1628.959263] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f66f9576-0b8a-4f9c-ab90-49edf62beccb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.031447] env[63297]: DEBUG nova.network.neutron [-] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.082784] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-37c312c4-7b42-4215-9301-7c7bc704cb0e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.096380] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86b7383-5611-4a1f-9df2-6d19abe9dfd5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.110774] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698016, 'name': ReconfigVM_Task, 'duration_secs': 0.323555} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.113954] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 617544f0-fa53-415d-9f00-c8143e8e25b1/617544f0-fa53-415d-9f00-c8143e8e25b1.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1629.115108] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0bbff286-7303-43e6-b9e6-585ed275eda8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.125212] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1629.125212] env[63297]: value = "task-1698017" [ 1629.125212] env[63297]: _type = "Task" [ 1629.125212] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.144393] env[63297]: DEBUG nova.compute.manager [req-ae74e7d7-6b84-4890-8945-cd0b5a8697fd req-57472b6a-4f1e-4a6f-abd9-8fdca7ec88bc service nova] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Detach interface failed, port_id=287bdace-3df3-414a-8adb-2f8d720f8528, reason: Instance 0b7b9f1b-d277-4219-92fb-e35a8b867e77 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1629.151888] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698017, 'name': Rename_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.255700] env[63297]: INFO nova.compute.resource_tracker [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating resource usage from migration 4c165943-95ae-4824-8da7-8d8dcf153e5d [ 1629.302939] env[63297]: INFO nova.compute.manager [-] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Took 1.69 seconds to deallocate network for instance. [ 1629.310115] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0f8cd358-d165-4b53-b35a-ff08cb5a5a44 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.589s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.470252] env[63297]: DEBUG oslo_concurrency.lockutils [None req-df5a8545-27c4-4df1-a556-dc9b516169e5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.473028] env[63297]: DEBUG oslo_concurrency.lockutils [None req-df5a8545-27c4-4df1-a556-dc9b516169e5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.473028] env[63297]: DEBUG nova.compute.manager [None req-df5a8545-27c4-4df1-a556-dc9b516169e5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1629.473028] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34a906f-ec66-4637-bced-63ee393c42d2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.484150] env[63297]: DEBUG nova.compute.manager [None req-df5a8545-27c4-4df1-a556-dc9b516169e5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63297) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1629.484824] env[63297]: DEBUG nova.objects.instance [None req-df5a8545-27c4-4df1-a556-dc9b516169e5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lazy-loading 'flavor' on Instance uuid 1110d6ca-ca5f-44d1-baca-c22c8fc166b5 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1629.537785] env[63297]: INFO nova.compute.manager [-] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Took 1.69 seconds to 
deallocate network for instance. [ 1629.603567] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678ee3da-e622-4939-b123-e1606073a171 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.616142] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8effe8a9-f6cd-4fad-8d0a-9f66cb6d9432 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.620115] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "f5866b1e-cd77-464e-858e-eb14dab0637c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.620518] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "f5866b1e-cd77-464e-858e-eb14dab0637c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.620741] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "f5866b1e-cd77-464e-858e-eb14dab0637c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.621124] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "f5866b1e-cd77-464e-858e-eb14dab0637c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.621274] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "f5866b1e-cd77-464e-858e-eb14dab0637c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.623827] env[63297]: INFO nova.compute.manager [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Terminating instance [ 1629.626524] env[63297]: DEBUG nova.compute.manager [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1629.626782] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1629.632724] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b85617-340d-45a9-a3f8-ab9178b3154e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.663200] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba305ea7-116a-46af-b601-2cd8621f2fd9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.672042] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698017, 'name': Rename_Task, 'duration_secs': 0.157998} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.672326] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1629.673140] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1629.673417] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f50ace84-b1db-4005-aa78-cea51a19469a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.677219] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8af89b6e-7194-4061-8687-b3d23a378e89 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.681246] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214fc2bc-b6e0-4f24-ad25-63129f8c54c7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.688330] env[63297]: DEBUG oslo_vmware.api [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1629.688330] env[63297]: value = "task-1698018" [ 1629.688330] env[63297]: _type = "Task" [ 1629.688330] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.701805] env[63297]: DEBUG nova.compute.provider_tree [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1629.706869] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1629.706869] env[63297]: value = "task-1698019" [ 1629.706869] env[63297]: _type = "Task" [ 1629.706869] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.712230] env[63297]: DEBUG oslo_vmware.api [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698018, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.718032] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698019, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.813110] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.995949] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-df5a8545-27c4-4df1-a556-dc9b516169e5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1629.995949] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a741b31b-7ee0-4a39-b92a-12e0fbf58bf0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.006997] env[63297]: DEBUG oslo_vmware.api [None req-df5a8545-27c4-4df1-a556-dc9b516169e5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1630.006997] env[63297]: value = "task-1698020" [ 1630.006997] env[63297]: _type = "Task" [ 1630.006997] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.017582] env[63297]: DEBUG oslo_vmware.api [None req-df5a8545-27c4-4df1-a556-dc9b516169e5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698020, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.049590] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.200822] env[63297]: DEBUG oslo_vmware.api [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698018, 'name': PowerOffVM_Task, 'duration_secs': 0.254087} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.201298] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1630.201961] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1630.202334] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5685adab-c3d4-4a17-8a95-ae1965220f4a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.208442] env[63297]: DEBUG nova.scheduler.client.report [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1630.228382] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.228743] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.238676] 
env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698019, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.408383] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1630.408741] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1630.408741] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleting the datastore file [datastore1] f5866b1e-cd77-464e-858e-eb14dab0637c {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1630.409061] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-770aa123-e96b-4d1f-a83b-41c3d6b2e4b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.418258] env[63297]: DEBUG oslo_vmware.api [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1630.418258] env[63297]: value = "task-1698022" [ 1630.418258] env[63297]: _type = "Task" [ 1630.418258] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.427651] env[63297]: DEBUG oslo_vmware.api [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698022, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.514957] env[63297]: DEBUG oslo_vmware.api [None req-df5a8545-27c4-4df1-a556-dc9b516169e5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698020, 'name': PowerOffVM_Task, 'duration_secs': 0.250436} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.515464] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-df5a8545-27c4-4df1-a556-dc9b516169e5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1630.515651] env[63297]: DEBUG nova.compute.manager [None req-df5a8545-27c4-4df1-a556-dc9b516169e5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1630.516519] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be53f6a-4305-4b06-95ae-e9976cfc3808 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.729146] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.493s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.729614] env[63297]: INFO nova.compute.manager [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Migrating [ 1630.736591] env[63297]: DEBUG oslo_vmware.api [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698019, 'name': PowerOnVM_Task, 'duration_secs': 0.544089} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.740143] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.347s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.741816] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1630.742077] env[63297]: INFO nova.compute.manager [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Took 8.25 seconds to spawn the instance on the hypervisor. 
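Aside: the recurring 'Lock "compute_resources" acquired ... :: waited' and '"released" ... :: held' records in this trace are emitted by oslo.concurrency's synchronized wrapper (the "inner" frames in lockutils.py). The sketch below is not Nova's actual implementation; it is a minimal, hypothetical illustration of that pattern: a named semaphore serializes resource-tracker style callers, and the wrapper reports how long each caller waited for and then held the lock.

# Sketch only (assumption, not Nova's code): how a named oslo.concurrency
# lock produces the acquired/waited and released/held lines seen above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid, vcpus, memory_mb):
    # Runs with the 'compute_resources' semaphore held; concurrent callers
    # (resize_claim, update_usage, _update_available_resource in this trace)
    # queue here, which is why the log shows non-zero 'waited' times.
    print('usage for %s: %d vCPU, %d MB' % (instance_uuid, vcpus, memory_mb))

if __name__ == '__main__':
    # Instance UUID taken from the trace, purely as example input.
    update_usage('a6d86e78-ae24-4e70-9fb2-270177b40322', 1, 2048)

With default settings this is an in-process semaphore (external=False); the durations in the trace (e.g. "held 2.493s", "waited 4.347s") are measured and logged by that wrapper, not by the callers themselves.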
[ 1630.742348] env[63297]: DEBUG nova.compute.manager [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1630.743405] env[63297]: DEBUG nova.compute.utils [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1630.750332] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf45cfe-e137-436d-998c-33cf7eb1626c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.930061] env[63297]: DEBUG oslo_vmware.api [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291592} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.930418] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1630.930632] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1630.930808] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1630.931569] env[63297]: INFO nova.compute.manager [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1630.931569] env[63297]: DEBUG oslo.service.loopingcall [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1630.931735] env[63297]: DEBUG nova.compute.manager [-] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1630.931805] env[63297]: DEBUG nova.network.neutron [-] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1631.047182] env[63297]: DEBUG oslo_concurrency.lockutils [None req-df5a8545-27c4-4df1-a556-dc9b516169e5 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.576s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.205767] env[63297]: DEBUG nova.compute.manager [req-7e0a6ff7-ed6c-4783-8bb8-b76081f0a794 req-285c15ab-b0e8-4d58-8bac-2b3264269ddd service nova] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Received event network-vif-deleted-50e51b32-9790-4358-80e3-22b2274beca1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1631.206098] env[63297]: INFO nova.compute.manager [req-7e0a6ff7-ed6c-4783-8bb8-b76081f0a794 req-285c15ab-b0e8-4d58-8bac-2b3264269ddd service nova] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Neutron deleted interface 50e51b32-9790-4358-80e3-22b2274beca1; detaching it from the instance and deleting it from the info cache [ 1631.206317] env[63297]: DEBUG nova.network.neutron [req-7e0a6ff7-ed6c-4783-8bb8-b76081f0a794 req-285c15ab-b0e8-4d58-8bac-2b3264269ddd service nova] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.249854] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.021s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.262845] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.263036] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.263214] env[63297]: DEBUG nova.network.neutron [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Building network info 
cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1631.270913] env[63297]: INFO nova.compute.manager [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Took 24.20 seconds to build instance. [ 1631.412535] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquiring lock "d70bfe65-5faa-4248-9119-9a38259cb418" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.412875] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lock "d70bfe65-5faa-4248-9119-9a38259cb418" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.681535] env[63297]: DEBUG nova.network.neutron [-] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.709652] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-588041ac-b61f-4557-8d70-f0bab2b633d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.722682] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c633338-7be3-4bc4-bce6-a8e5e64cfbf1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.759570] env[63297]: DEBUG nova.compute.manager [req-7e0a6ff7-ed6c-4783-8bb8-b76081f0a794 req-285c15ab-b0e8-4d58-8bac-2b3264269ddd service nova] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Detach interface failed, port_id=50e51b32-9790-4358-80e3-22b2274beca1, reason: Instance f5866b1e-cd77-464e-858e-eb14dab0637c could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1631.766543] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Applying migration context for instance a6d86e78-ae24-4e70-9fb2-270177b40322 as it has an incoming, in-progress migration 4c165943-95ae-4824-8da7-8d8dcf153e5d. 
Migration status is pre-migrating {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1631.768888] env[63297]: INFO nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating resource usage from migration 4c165943-95ae-4824-8da7-8d8dcf153e5d [ 1631.774898] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cb11d197-b38b-4899-be0a-61287a74454e tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "617544f0-fa53-415d-9f00-c8143e8e25b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.734s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.791967] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.792205] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 765f3232-f3f9-4d9b-92f2-fb6603f2a90a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.792323] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b1ed5d76-d358-49d3-a854-8f968bc987ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.792450] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.792581] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 96265295-6b0c-4803-bb89-6166c9d3fc7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.792724] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 66b7a1e5-5e74-49db-99f3-4427d7297bf2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.792838] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 10def566-2d1f-4ea2-9df5-ebf4d77f7b48 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.792958] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 42d872d6-da12-474b-8741-1d991d507cfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.793097] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.793239] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 0b7b9f1b-d277-4219-92fb-e35a8b867e77 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1631.793379] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance f5866b1e-cd77-464e-858e-eb14dab0637c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.793510] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 1d8c6df5-069f-4647-a2f6-e69a4bf8be94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.793648] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.793783] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance fd178a30-b5f0-4019-a05f-f1928e1d122a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1631.793921] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 1110d6ca-ca5f-44d1-baca-c22c8fc166b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.794075] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 617544f0-fa53-415d-9f00-c8143e8e25b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.794214] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Migration 4c165943-95ae-4824-8da7-8d8dcf153e5d is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1631.794328] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance a6d86e78-ae24-4e70-9fb2-270177b40322 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.915573] env[63297]: DEBUG nova.compute.manager [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1632.066035] env[63297]: DEBUG nova.network.neutron [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance_info_cache with network_info: [{"id": "e0aae0a1-c525-4233-b876-799c11006f75", "address": "fa:16:3e:f7:ce:b0", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0aae0a1-c5", "ovs_interfaceid": "e0aae0a1-c525-4233-b876-799c11006f75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.158182] env[63297]: INFO nova.compute.manager [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Rebuilding instance [ 1632.184270] env[63297]: INFO nova.compute.manager [-] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Took 1.25 seconds to deallocate network for instance. [ 1632.300516] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance d70bfe65-5faa-4248-9119-9a38259cb418 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.300873] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1632.300981] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1632.305049] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.305049] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1632.305049] env[63297]: INFO nova.compute.manager [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Attaching volume 540b2a55-1e8e-476b-87e5-7c4753f15ddb to /dev/sdb [ 1632.351634] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbaaf91-6946-4777-978d-85662185ec27 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.358668] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48935939-1058-4e7d-8aa7-51fc5e9c4cb7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.378978] env[63297]: DEBUG nova.virt.block_device [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updating existing volume attachment record: c65ed054-e7e9-4fec-9a11-f09e81c39a30 {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1632.436628] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.469958] env[63297]: DEBUG nova.compute.manager [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 
tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1632.470934] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22369a17-6c5c-4c93-ab0b-4f925607eb77 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.568506] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.617924] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593b9d1d-9f0a-4818-8baa-53380de2e9e0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.627438] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f96d7a7-13ab-46aa-829c-1d20e85901f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.660453] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d96c61b-e4b0-4ebb-9984-6abe1c48aaa5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.669025] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51cccf4-cb69-4eaf-b50c-e216e3cd5542 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.683768] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1632.690779] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.990682] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1632.996022] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5443675b-6c31-45fd-b891-5f8123eb2890 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.001525] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1633.001525] env[63297]: value = 
"task-1698026" [ 1633.001525] env[63297]: _type = "Task" [ 1633.001525] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.014060] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1633.014060] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1633.014754] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c808e8c-388a-4cda-aab1-81bd571ddeba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.024566] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1633.024872] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ece7df7c-202d-4355-ab56-e67baa1acca3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.162497] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1633.162850] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1633.163066] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleting the datastore file [datastore1] 1110d6ca-ca5f-44d1-baca-c22c8fc166b5 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1633.163480] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-026841a7-088a-40a6-87ff-e2ff2c671bdc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.172030] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1633.172030] env[63297]: value = "task-1698028" [ 1633.172030] env[63297]: _type = "Task" [ 1633.172030] 
env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.181718] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698028, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.189823] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1633.495113] env[63297]: DEBUG nova.compute.manager [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1633.495113] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eaf101b-a505-4238-95db-5001d106603c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.683100] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698028, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220426} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.683395] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1633.683582] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1633.683762] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1633.695102] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1633.695317] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.955s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.695571] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.883s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.695749] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.698079] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.649s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.698274] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.700015] env[63297]: DEBUG 
oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.265s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.701572] env[63297]: INFO nova.compute.claims [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1633.726462] env[63297]: INFO nova.scheduler.client.report [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted allocations for instance fd178a30-b5f0-4019-a05f-f1928e1d122a [ 1633.728470] env[63297]: INFO nova.scheduler.client.report [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Deleted allocations for instance 0b7b9f1b-d277-4219-92fb-e35a8b867e77 [ 1634.006530] env[63297]: INFO nova.compute.manager [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] instance snapshotting [ 1634.009510] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7833571d-adba-4b05-83f1-163f8ea9dffb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.029689] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c525ef-1ac7-4e4f-9840-e45381221474 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.086391] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef07cee-51d8-4388-af94-8f47fa295b33 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.107443] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance 'a6d86e78-ae24-4e70-9fb2-270177b40322' progress to 0 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1634.238794] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6400a77-9cb4-487c-9e82-02c7918f0e43 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "fd178a30-b5f0-4019-a05f-f1928e1d122a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.265s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.240143] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0ba9d4d9-a914-482d-a682-21cf2af2b0eb tempest-AttachVolumeTestJSON-1356772649 tempest-AttachVolumeTestJSON-1356772649-project-member] Lock "0b7b9f1b-d277-4219-92fb-e35a8b867e77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 
7.716s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.544461] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1634.544900] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-21814647-61b5-4ced-b6e9-fe70559231fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.556287] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1634.556287] env[63297]: value = "task-1698029" [ 1634.556287] env[63297]: _type = "Task" [ 1634.556287] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.566793] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698029, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.613888] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1634.614326] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f8f42e3-b746-42c3-949c-52f5fec7bc71 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.623737] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1634.623737] env[63297]: value = "task-1698031" [ 1634.623737] env[63297]: _type = "Task" [ 1634.623737] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.633718] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698031, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.723104] env[63297]: DEBUG nova.virt.hardware [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1634.723104] env[63297]: DEBUG nova.virt.hardware [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1634.723104] env[63297]: DEBUG nova.virt.hardware [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1634.723104] env[63297]: DEBUG nova.virt.hardware [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1634.723104] env[63297]: DEBUG nova.virt.hardware [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1634.723104] env[63297]: DEBUG nova.virt.hardware [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1634.723104] env[63297]: DEBUG nova.virt.hardware [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1634.723908] env[63297]: DEBUG nova.virt.hardware [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1634.723908] 
env[63297]: DEBUG nova.virt.hardware [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1634.723908] env[63297]: DEBUG nova.virt.hardware [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1634.724032] env[63297]: DEBUG nova.virt.hardware [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1634.725025] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9981a919-4f36-4296-be48-da825bb7cc27 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.735504] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795c086d-9e3d-493b-a33c-6143b3d28c29 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.758229] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:58:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fc48e29b-113c-4849-850c-35435eab4052', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67f99216-a730-4066-be67-21fcb4979776', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1634.767868] env[63297]: DEBUG oslo.service.loopingcall [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1634.772437] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1634.773628] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9c009d7-010c-4384-b21b-a0021794e9e6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.796655] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1634.796655] env[63297]: value = "task-1698033" [ 1634.796655] env[63297]: _type = "Task" [ 1634.796655] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.807195] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698033, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.005042] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2499aa-71e7-4e45-88c6-3fc1d2a78933 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.013542] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6adb4f21-b00b-49ab-93a9-e98c82193ce6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.049918] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0321d6bb-a5e5-4af0-96e2-5f83bddd2d11 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.061658] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2e2c56-3917-44a7-815c-15cc69f88e92 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.079193] env[63297]: DEBUG nova.compute.provider_tree [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1635.081498] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698029, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.135997] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698031, 'name': PowerOffVM_Task, 'duration_secs': 0.208795} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.136503] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1635.136737] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance 'a6d86e78-ae24-4e70-9fb2-270177b40322' progress to 17 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1635.308665] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698033, 'name': CreateVM_Task, 'duration_secs': 0.500302} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.308864] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1635.309575] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1635.309741] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1635.310084] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1635.310346] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0725e436-e988-46aa-80fe-6b7d5d02c21f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.316161] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1635.316161] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b4e48a-a8e3-7620-1af4-4d4bb64cab1d" [ 1635.316161] env[63297]: _type = "Task" [ 1635.316161] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.325297] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b4e48a-a8e3-7620-1af4-4d4bb64cab1d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.487315] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "c2362520-ed07-4124-aade-bb54830b0d54" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.487546] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "c2362520-ed07-4124-aade-bb54830b0d54" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.567975] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698029, 'name': CreateSnapshot_Task, 'duration_secs': 0.69056} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.568359] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1635.569097] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73974e9-536f-462a-83ce-f5be3095203a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.582921] env[63297]: DEBUG nova.scheduler.client.report [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1635.643658] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1635.644359] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1635.644359] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1635.644359] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1635.644620] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1635.644922] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1635.645325] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1635.645524] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1635.645704] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1635.645881] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1635.646073] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 
tempest-ServerDiskConfigTestJSON-1661101796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1635.651754] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db981848-576a-4b17-a2fc-1a7168a57bba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.668577] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1635.668577] env[63297]: value = "task-1698034" [ 1635.668577] env[63297]: _type = "Task" [ 1635.668577] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.680774] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698034, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.826346] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b4e48a-a8e3-7620-1af4-4d4bb64cab1d, 'name': SearchDatastore_Task, 'duration_secs': 0.017419} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.826595] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1635.826834] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1635.827078] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1635.827225] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1635.827406] env[63297]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1635.827682] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-839fb484-5cdb-48ef-ad4e-c9f773fae75c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.836061] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1635.836243] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1635.836978] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ead4a31-833a-4b5d-9d31-70c2fab6c946 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.844430] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1635.844430] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527b3b22-1f06-925d-ad5e-21ddf81035b5" [ 1635.844430] env[63297]: _type = "Task" [ 1635.844430] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.852684] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527b3b22-1f06-925d-ad5e-21ddf81035b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.990045] env[63297]: DEBUG nova.compute.manager [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1636.088223] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1636.089230] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.089606] env[63297]: DEBUG nova.compute.manager [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1636.092114] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d2e4696f-0dc4-4ed2-8c58-b603f222344a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.095474] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.405s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.095705] env[63297]: DEBUG nova.objects.instance [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lazy-loading 'resources' on Instance uuid f5866b1e-cd77-464e-858e-eb14dab0637c {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1636.103972] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1636.103972] env[63297]: value = "task-1698035" [ 1636.103972] env[63297]: _type = "Task" [ 1636.103972] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.115123] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698035, 'name': CloneVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.181151] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698034, 'name': ReconfigVM_Task, 'duration_secs': 0.188598} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.181598] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance 'a6d86e78-ae24-4e70-9fb2-270177b40322' progress to 33 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1636.335998] env[63297]: DEBUG oslo_vmware.rw_handles [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52811a3f-b62b-4fef-ad20-59f278c9c5b0/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1636.337089] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa05223-5e5c-4bdf-abcf-d8be2449f30a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.343712] env[63297]: DEBUG oslo_vmware.rw_handles [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52811a3f-b62b-4fef-ad20-59f278c9c5b0/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1636.343910] env[63297]: ERROR oslo_vmware.rw_handles [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52811a3f-b62b-4fef-ad20-59f278c9c5b0/disk-0.vmdk due to incomplete transfer. [ 1636.344166] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1bd1bc10-7757-44cd-929e-b309dc55a183 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.358045] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527b3b22-1f06-925d-ad5e-21ddf81035b5, 'name': SearchDatastore_Task, 'duration_secs': 0.009844} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.358045] env[63297]: DEBUG oslo_vmware.rw_handles [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52811a3f-b62b-4fef-ad20-59f278c9c5b0/disk-0.vmdk. 
{{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1636.358045] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Uploaded image 5bd2ea09-982b-4387-829f-2079c40f24c0 to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1636.360087] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1636.361127] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63faecac-267c-408b-938f-3d8aeb44739d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.362662] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f90f956b-670d-4de1-b3ef-b267ba6a0a35 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.367528] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1636.367528] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5226652f-a73b-aff6-fc04-c5df7d2df258" [ 1636.367528] env[63297]: _type = "Task" [ 1636.367528] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.371909] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1636.371909] env[63297]: value = "task-1698036" [ 1636.371909] env[63297]: _type = "Task" [ 1636.371909] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.377737] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5226652f-a73b-aff6-fc04-c5df7d2df258, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.382346] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698036, 'name': Destroy_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.511463] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.596963] env[63297]: DEBUG nova.compute.utils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1636.598550] env[63297]: DEBUG nova.compute.manager [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Not allocating networking since 'none' was specified. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1636.614235] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698035, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.688749] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1636.688998] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1636.689168] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1636.689349] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1636.689495] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 
tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1636.689647] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1636.689882] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1636.690058] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1636.690228] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1636.690393] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1636.690599] env[63297]: DEBUG nova.virt.hardware [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1636.696285] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Reconfiguring VM instance instance-0000005d to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1636.696775] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1828518e-cc73-4587-9064-667db3041a10 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.718124] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1636.718124] env[63297]: value = "task-1698037" [ 1636.718124] env[63297]: _type = "Task" [ 1636.718124] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.726981] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698037, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.860324] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29e2be1-22b0-4638-9ede-5d5b737292c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.868397] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1d5e44-9352-4333-b20c-7ebb2d9a4d78 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.907416] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c947c128-d8a3-4dca-9427-3dc11eda0b67 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.909859] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5226652f-a73b-aff6-fc04-c5df7d2df258, 'name': SearchDatastore_Task, 'duration_secs': 0.009915} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.912846] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1636.913149] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 1110d6ca-ca5f-44d1-baca-c22c8fc166b5/1110d6ca-ca5f-44d1-baca-c22c8fc166b5.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1636.913428] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698036, 'name': Destroy_Task, 'duration_secs': 0.425218} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.913953] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7127664-53ff-4236-a062-b626ee6655c7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.916014] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Destroyed the VM [ 1636.916268] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1636.918182] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-368655c1-1749-42b3-b5a4-ff635211a365 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.920918] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d851d5-48e0-4401-86ed-3570d183da8a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.925872] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1636.925872] env[63297]: value = "task-1698038" [ 1636.925872] env[63297]: _type = "Task" [ 1636.925872] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.937829] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1636.937829] env[63297]: value = "task-1698039" [ 1636.937829] env[63297]: _type = "Task" [ 1636.937829] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.938363] env[63297]: DEBUG nova.compute.provider_tree [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1636.941026] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Volume attach. 
Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1636.941026] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353987', 'volume_id': '540b2a55-1e8e-476b-87e5-7c4753f15ddb', 'name': 'volume-540b2a55-1e8e-476b-87e5-7c4753f15ddb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1d8c6df5-069f-4647-a2f6-e69a4bf8be94', 'attached_at': '', 'detached_at': '', 'volume_id': '540b2a55-1e8e-476b-87e5-7c4753f15ddb', 'serial': '540b2a55-1e8e-476b-87e5-7c4753f15ddb'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1636.942201] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3edef1a-d81f-4c0c-b21c-e6fd05954048 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.950596] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698038, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.956770] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698039, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.968311] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98fe2310-f041-47d9-9850-2017759538f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.993384] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] volume-540b2a55-1e8e-476b-87e5-7c4753f15ddb/volume-540b2a55-1e8e-476b-87e5-7c4753f15ddb.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1636.993658] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9362af6-9197-477f-8890-955e94ad3f3c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.013028] env[63297]: DEBUG oslo_vmware.api [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1637.013028] env[63297]: value = "task-1698040" [ 1637.013028] env[63297]: _type = "Task" [ 1637.013028] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.020854] env[63297]: DEBUG oslo_vmware.api [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698040, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.103485] env[63297]: DEBUG nova.compute.manager [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1637.116124] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698035, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.228880] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698037, 'name': ReconfigVM_Task, 'duration_secs': 0.186363} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.229271] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Reconfigured VM instance instance-0000005d to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1637.230216] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5e3077-6fd2-404d-a661-630e3bb5aaae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.254971] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] a6d86e78-ae24-4e70-9fb2-270177b40322/a6d86e78-ae24-4e70-9fb2-270177b40322.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1637.255357] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df3a4fe9-c695-43d6-96ff-4a853efd69f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.274595] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1637.274595] env[63297]: value = "task-1698041" [ 1637.274595] env[63297]: _type = "Task" [ 1637.274595] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.285354] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698041, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.437933] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698038, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471101} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.438249] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 1110d6ca-ca5f-44d1-baca-c22c8fc166b5/1110d6ca-ca5f-44d1-baca-c22c8fc166b5.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1637.438545] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1637.438821] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-15fd396a-53bd-4c05-9664-1fe01fe3079f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.451038] env[63297]: DEBUG nova.scheduler.client.report [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1637.455202] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698039, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.457458] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1637.457458] env[63297]: value = "task-1698042" [ 1637.457458] env[63297]: _type = "Task" [ 1637.457458] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.466792] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698042, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.523200] env[63297]: DEBUG oslo_vmware.api [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698040, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.619845] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698035, 'name': CloneVM_Task} progress is 95%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.785594] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698041, 'name': ReconfigVM_Task, 'duration_secs': 0.363713} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.785934] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Reconfigured VM instance instance-0000005d to attach disk [datastore1] a6d86e78-ae24-4e70-9fb2-270177b40322/a6d86e78-ae24-4e70-9fb2-270177b40322.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1637.786258] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance 'a6d86e78-ae24-4e70-9fb2-270177b40322' progress to 50 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1637.952856] env[63297]: DEBUG oslo_vmware.api [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698039, 'name': RemoveSnapshot_Task, 'duration_secs': 0.549198} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.953099] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1637.953332] env[63297]: INFO nova.compute.manager [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Took 12.93 seconds to snapshot the instance on the hypervisor. [ 1637.957883] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.862s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.960225] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.449s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.961825] env[63297]: INFO nova.compute.claims [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1637.973382] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698042, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077792} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.973512] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1637.974192] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8cb1f7-373c-4ac9-b4ca-9c33331232e0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.996150] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 1110d6ca-ca5f-44d1-baca-c22c8fc166b5/1110d6ca-ca5f-44d1-baca-c22c8fc166b5.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1637.997258] env[63297]: INFO nova.scheduler.client.report [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted allocations for instance f5866b1e-cd77-464e-858e-eb14dab0637c [ 1637.999786] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0d92afe-8623-4578-b0b4-26b082b5e7ed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.027639] env[63297]: DEBUG oslo_vmware.api [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698040, 'name': ReconfigVM_Task, 'duration_secs': 0.769197} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.028959] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Reconfigured VM instance instance-00000059 to attach disk [datastore1] volume-540b2a55-1e8e-476b-87e5-7c4753f15ddb/volume-540b2a55-1e8e-476b-87e5-7c4753f15ddb.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1638.033772] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1638.033772] env[63297]: value = "task-1698044" [ 1638.033772] env[63297]: _type = "Task" [ 1638.033772] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.033964] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a77b3e0-7e93-4f0f-8d01-b18bf1cf59ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.055165] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698044, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.056697] env[63297]: DEBUG oslo_vmware.api [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1638.056697] env[63297]: value = "task-1698045" [ 1638.056697] env[63297]: _type = "Task" [ 1638.056697] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.065397] env[63297]: DEBUG oslo_vmware.api [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698045, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.121711] env[63297]: DEBUG nova.compute.manager [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1638.124423] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698035, 'name': CloneVM_Task, 'duration_secs': 1.861596} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.125049] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Created linked-clone VM from snapshot [ 1638.127086] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a47cbaf-e292-4094-87c3-99314562c726 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.138579] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Uploading image a6e60b25-3b73-4a5a-adaf-d6bf74f686f7 {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1638.150342] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1638.150342] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6d5604e2-c165-45cf-a98e-e94728c9a8db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.155035] env[63297]: DEBUG nova.virt.hardware [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1638.155035] env[63297]: DEBUG nova.virt.hardware [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1638.155035] env[63297]: DEBUG nova.virt.hardware [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1638.155035] env[63297]: DEBUG nova.virt.hardware [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1638.155035] env[63297]: DEBUG nova.virt.hardware [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1638.155035] env[63297]: DEBUG nova.virt.hardware [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1638.155331] env[63297]: DEBUG nova.virt.hardware [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1638.155398] env[63297]: DEBUG nova.virt.hardware [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1638.155615] env[63297]: DEBUG nova.virt.hardware [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1638.155736] env[63297]: DEBUG nova.virt.hardware [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1638.155912] env[63297]: DEBUG nova.virt.hardware [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1638.156770] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e345de-a46f-4acf-8652-b6876d842493 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.166330] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f54a16b-ac27-42a0-a6ac-65aa790e58b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.170561] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1638.170561] env[63297]: value = "task-1698046" [ 1638.170561] env[63297]: _type = "Task" [ 1638.170561] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.182095] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Instance VIF info [] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1638.187755] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Creating folder: Project (bb8ce9ed10904ed488b3c19ad1445628). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1638.188475] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb0117c2-3fa5-49f5-a794-03518ae4a1ef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.193069] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698046, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.202354] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Created folder: Project (bb8ce9ed10904ed488b3c19ad1445628) in parent group-v353718. [ 1638.202589] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Creating folder: Instances. Parent ref: group-v353991. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1638.202836] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-683f969c-31c8-4c2c-9dcd-b86132ee6b2f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.212530] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Created folder: Instances in parent group-v353991. [ 1638.212530] env[63297]: DEBUG oslo.service.loopingcall [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1638.212724] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1638.213019] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-018b1c43-ac4f-47dd-a842-8084d4a45ef5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.229985] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1638.229985] env[63297]: value = "task-1698049" [ 1638.229985] env[63297]: _type = "Task" [ 1638.229985] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.242445] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698049, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.294642] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39315f34-0a04-491a-a1b2-a71ec5adf8af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.313443] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ff1c09-5ed7-47d0-b300-a303012deac4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.332333] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance 'a6d86e78-ae24-4e70-9fb2-270177b40322' progress to 67 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1638.510960] env[63297]: DEBUG nova.compute.manager [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Found 3 images (rotation: 2) {{(pid=63297) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1638.511204] env[63297]: DEBUG nova.compute.manager [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Rotating out 1 backups {{(pid=63297) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4563}} [ 1638.511383] env[63297]: DEBUG nova.compute.manager [None req-5c31ad7d-f3f5-43b7-aae9-db8246f25f99 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Deleting image 6e1de458-527b-4161-a12d-2d2d0f5efddb {{(pid=63297) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4568}} [ 1638.522475] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2e66da23-ab2c-4dc8-9f88-c08db573af15 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "f5866b1e-cd77-464e-858e-eb14dab0637c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.902s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.556151] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698044, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.565669] env[63297]: DEBUG oslo_vmware.api [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698045, 'name': ReconfigVM_Task, 'duration_secs': 0.181088} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.565913] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353987', 'volume_id': '540b2a55-1e8e-476b-87e5-7c4753f15ddb', 'name': 'volume-540b2a55-1e8e-476b-87e5-7c4753f15ddb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1d8c6df5-069f-4647-a2f6-e69a4bf8be94', 'attached_at': '', 'detached_at': '', 'volume_id': '540b2a55-1e8e-476b-87e5-7c4753f15ddb', 'serial': '540b2a55-1e8e-476b-87e5-7c4753f15ddb'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1638.681533] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698046, 'name': Destroy_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.740115] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698049, 'name': CreateVM_Task, 'duration_secs': 0.324038} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.740115] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1638.741222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.741515] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.741929] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1638.742258] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c30c87f1-6eac-4eff-b496-d4ad17ee4f06 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.747193] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1638.747193] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526d0061-6590-089f-138b-0f13f179265a" [ 1638.747193] env[63297]: _type = "Task" [ 1638.747193] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.756635] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526d0061-6590-089f-138b-0f13f179265a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.882172] env[63297]: DEBUG nova.network.neutron [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Port e0aae0a1-c525-4233-b876-799c11006f75 binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1639.054369] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698044, 'name': ReconfigVM_Task, 'duration_secs': 0.567258} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.056971] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 1110d6ca-ca5f-44d1-baca-c22c8fc166b5/1110d6ca-ca5f-44d1-baca-c22c8fc166b5.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1639.057946] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b386dbf-4ca9-45e9-885a-0960c68e2c01 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.065331] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1639.065331] env[63297]: value = "task-1698050" [ 1639.065331] env[63297]: _type = "Task" [ 1639.065331] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.077018] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698050, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.129791] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "c89d23b7-deb0-4394-9a42-2ac3990da98d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.130408] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "c89d23b7-deb0-4394-9a42-2ac3990da98d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.181490] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698046, 'name': Destroy_Task, 'duration_secs': 0.590849} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.184180] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Destroyed the VM [ 1639.184499] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1639.184970] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8d8e693d-a0db-40d4-ad75-404fc4a75c91 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.198717] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1639.198717] env[63297]: value = "task-1698051" [ 1639.198717] env[63297]: _type = "Task" [ 1639.198717] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.207192] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698051, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.243998] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f647d844-a772-4311-9216-fbf3f29bd07c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.254488] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bff21c1-a677-4e82-a5c8-772d0075969a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.263898] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526d0061-6590-089f-138b-0f13f179265a, 'name': SearchDatastore_Task, 'duration_secs': 0.010314} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.264227] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1639.264557] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1639.264723] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.264911] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.265091] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1639.265378] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db96edf9-f12d-4c46-b560-e43748a97031 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.294855] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40a4d30-ab39-4359-ba64-895404726c81 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.303327] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78dc4c77-5220-4d22-8ab3-5a01b9e86224 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.307405] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1639.307593] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1639.308324] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc7bb833-b2c3-466b-ba42-742797fbab91 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.321517] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1639.321517] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52aa5783-d5a5-34db-4302-43aefc68bf34" [ 1639.321517] env[63297]: _type = "Task" [ 1639.321517] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.321995] env[63297]: DEBUG nova.compute.provider_tree [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1639.332579] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52aa5783-d5a5-34db-4302-43aefc68bf34, 'name': SearchDatastore_Task, 'duration_secs': 0.008609} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.333892] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d33653f5-f2ac-452c-aaa9-0fb59b61a6dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.339264] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1639.339264] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52191dad-77ba-9445-6b5a-6621ffa19b4a" [ 1639.339264] env[63297]: _type = "Task" [ 1639.339264] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.346879] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52191dad-77ba-9445-6b5a-6621ffa19b4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.576029] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698050, 'name': Rename_Task, 'duration_secs': 0.159121} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.576656] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1639.576656] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6feb2f3f-762f-4305-aaea-f659d73a765b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.583051] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1639.583051] env[63297]: value = "task-1698052" [ 1639.583051] env[63297]: _type = "Task" [ 1639.583051] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.590813] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698052, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.630959] env[63297]: DEBUG nova.objects.instance [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lazy-loading 'flavor' on Instance uuid 1d8c6df5-069f-4647-a2f6-e69a4bf8be94 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1639.633694] env[63297]: DEBUG nova.compute.manager [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1639.708392] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698051, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.826301] env[63297]: DEBUG nova.scheduler.client.report [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1639.851222] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52191dad-77ba-9445-6b5a-6621ffa19b4a, 'name': SearchDatastore_Task, 'duration_secs': 0.008365} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.851490] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1639.851742] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d70bfe65-5faa-4248-9119-9a38259cb418/d70bfe65-5faa-4248-9119-9a38259cb418.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1639.852008] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1952e487-ba0e-46ad-8da4-94aeb3a342f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.859308] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1639.859308] env[63297]: value = "task-1698053" [ 1639.859308] env[63297]: _type = "Task" [ 1639.859308] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.868473] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698053, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.904946] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "a6d86e78-ae24-4e70-9fb2-270177b40322-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.905765] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a6d86e78-ae24-4e70-9fb2-270177b40322-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.905765] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a6d86e78-ae24-4e70-9fb2-270177b40322-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.093773] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698052, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.144021] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3834cb76-09d8-4370-a8b5-3dca5377ede3 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.839s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.163929] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.211166] env[63297]: DEBUG oslo_vmware.api [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698051, 'name': RemoveSnapshot_Task, 'duration_secs': 0.806256} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.211925] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1640.332711] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.333433] env[63297]: DEBUG nova.compute.manager [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1640.336607] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.173s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.338165] env[63297]: INFO nova.compute.claims [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1640.370541] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698053, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49598} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.370884] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d70bfe65-5faa-4248-9119-9a38259cb418/d70bfe65-5faa-4248-9119-9a38259cb418.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1640.371252] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1640.372063] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-443a5836-bd90-4236-a1fc-23a2dbe6d39f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.382099] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1640.382099] env[63297]: value = "task-1698054" [ 1640.382099] env[63297]: _type = "Task" [ 1640.382099] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.390815] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698054, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.593950] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698052, 'name': PowerOnVM_Task, 'duration_secs': 0.566518} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.594744] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1640.594744] env[63297]: DEBUG nova.compute.manager [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1640.595213] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e536d808-0d2f-4fed-99b0-bafefc6c00f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.719023] env[63297]: WARNING nova.compute.manager [None req-01215841-c8d3-4bb9-965b-2bd0cbca3d43 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Image not found during snapshot: nova.exception.ImageNotFound: Image a6e60b25-3b73-4a5a-adaf-d6bf74f686f7 could not be found. [ 1640.795081] env[63297]: INFO nova.compute.manager [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Rescuing [ 1640.795422] env[63297]: DEBUG oslo_concurrency.lockutils [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.795612] env[63297]: DEBUG oslo_concurrency.lockutils [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.795792] env[63297]: DEBUG nova.network.neutron [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1640.837797] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5420f72-add8-46f5-98de-3ec5864e8b55 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "b1ed5d76-d358-49d3-a854-8f968bc987ad" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.838119] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5420f72-add8-46f5-98de-3ec5864e8b55 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock 
"b1ed5d76-d358-49d3-a854-8f968bc987ad" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.838308] env[63297]: DEBUG nova.compute.manager [None req-d5420f72-add8-46f5-98de-3ec5864e8b55 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1640.839904] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8126224c-5a64-4419-8ad6-1eb8d8784850 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.847265] env[63297]: DEBUG nova.compute.utils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1640.849863] env[63297]: DEBUG nova.compute.manager [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1640.849863] env[63297]: DEBUG nova.network.neutron [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1640.855722] env[63297]: DEBUG nova.compute.manager [None req-d5420f72-add8-46f5-98de-3ec5864e8b55 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63297) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1640.856175] env[63297]: DEBUG nova.objects.instance [None req-d5420f72-add8-46f5-98de-3ec5864e8b55 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'flavor' on Instance uuid b1ed5d76-d358-49d3-a854-8f968bc987ad {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1640.891061] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698054, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070161} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.892423] env[63297]: DEBUG nova.policy [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0da2fdb3c81747698f971951c5e0068b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efc8039a70b34a269d3aed1ecb558b7e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1640.893816] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1640.894598] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01fb1b9-0c22-4f53-8a4e-f4481c9524b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.914883] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] d70bfe65-5faa-4248-9119-9a38259cb418/d70bfe65-5faa-4248-9119-9a38259cb418.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1640.917670] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01857bd1-945e-4a13-9731-1a334345a647 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.940971] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1640.940971] env[63297]: value = "task-1698055" [ 1640.940971] env[63297]: _type = "Task" [ 1640.940971] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.950087] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698055, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.963873] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.964216] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.964426] env[63297]: DEBUG nova.network.neutron [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1641.106407] env[63297]: INFO nova.compute.manager [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] bringing vm to original state: 'stopped' [ 1641.188916] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "617544f0-fa53-415d-9f00-c8143e8e25b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.189248] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "617544f0-fa53-415d-9f00-c8143e8e25b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.189462] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "617544f0-fa53-415d-9f00-c8143e8e25b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.189646] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "617544f0-fa53-415d-9f00-c8143e8e25b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.189818] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "617544f0-fa53-415d-9f00-c8143e8e25b1-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.192108] env[63297]: INFO nova.compute.manager [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Terminating instance [ 1641.195993] env[63297]: DEBUG nova.compute.manager [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1641.196273] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1641.197274] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90aef56e-7006-4dec-b245-a422103d536d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.205683] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1641.206528] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3756d31d-ce7d-466b-acf5-0cbbc93e0d9a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.213308] env[63297]: DEBUG oslo_vmware.api [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1641.213308] env[63297]: value = "task-1698057" [ 1641.213308] env[63297]: _type = "Task" [ 1641.213308] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.222425] env[63297]: DEBUG oslo_vmware.api [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698057, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.353264] env[63297]: DEBUG nova.compute.manager [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1641.363572] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5420f72-add8-46f5-98de-3ec5864e8b55 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1641.367016] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bff93e5b-e68f-4cc0-9725-08df8b8423ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.371097] env[63297]: DEBUG oslo_vmware.api [None req-d5420f72-add8-46f5-98de-3ec5864e8b55 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1641.371097] env[63297]: value = "task-1698058" [ 1641.371097] env[63297]: _type = "Task" [ 1641.371097] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.387740] env[63297]: DEBUG oslo_vmware.api [None req-d5420f72-add8-46f5-98de-3ec5864e8b55 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698058, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.449420] env[63297]: DEBUG nova.network.neutron [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Successfully created port: 08bfccea-a6c3-4e93-b78f-ae706f8a8469 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1641.455794] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698055, 'name': ReconfigVM_Task, 'duration_secs': 0.484209} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.459546] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Reconfigured VM instance instance-00000062 to attach disk [datastore1] d70bfe65-5faa-4248-9119-9a38259cb418/d70bfe65-5faa-4248-9119-9a38259cb418.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1641.464018] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29bc8d53-403b-40d6-ad50-5300a62c3c5c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.478918] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1641.478918] env[63297]: value = "task-1698059" [ 1641.478918] env[63297]: _type = "Task" [ 1641.478918] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.489450] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698059, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.676687] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28247c10-d040-448b-9bbe-c3a9da3baab3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.685398] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b107944-d911-4694-89fc-aa93454ff90b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.719028] env[63297]: DEBUG nova.network.neutron [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updating instance_info_cache with network_info: [{"id": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "address": "fa:16:3e:6a:dc:37", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f272d86-33", "ovs_interfaceid": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.722553] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6177425-a682-4c8f-b1de-fd304020fe43 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.733877] env[63297]: DEBUG oslo_vmware.api [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698057, 'name': PowerOffVM_Task, 'duration_secs': 0.379004} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.735863] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1641.736059] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1641.736559] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c12b4e7-6729-4e8f-b04c-7710909546c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.739708] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b7fb27-3265-476e-9fd1-66b9353778f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.755504] env[63297]: DEBUG nova.compute.provider_tree [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1641.815402] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1641.815578] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1641.815866] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleting the datastore file [datastore1] 617544f0-fa53-415d-9f00-c8143e8e25b1 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1641.816057] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b574f077-76f7-44fd-8e08-7890c29016c9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.823348] env[63297]: DEBUG oslo_vmware.api [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for the task: (returnval){ [ 1641.823348] env[63297]: value = "task-1698061" [ 1641.823348] env[63297]: _type = "Task" [ 1641.823348] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.832348] env[63297]: DEBUG oslo_vmware.api [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698061, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.843151] env[63297]: DEBUG nova.network.neutron [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance_info_cache with network_info: [{"id": "e0aae0a1-c525-4233-b876-799c11006f75", "address": "fa:16:3e:f7:ce:b0", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0aae0a1-c5", "ovs_interfaceid": "e0aae0a1-c525-4233-b876-799c11006f75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.882731] env[63297]: DEBUG oslo_vmware.api [None req-d5420f72-add8-46f5-98de-3ec5864e8b55 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698058, 'name': PowerOffVM_Task, 'duration_secs': 0.254029} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.883060] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5420f72-add8-46f5-98de-3ec5864e8b55 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1641.883220] env[63297]: DEBUG nova.compute.manager [None req-d5420f72-add8-46f5-98de-3ec5864e8b55 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1641.884021] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19fcb03-8e0e-4fb3-bab4-7dd2d1065436 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.985814] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698059, 'name': Rename_Task, 'duration_secs': 0.175797} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.986112] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1641.986388] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b665aeb-7829-4fc5-8081-832b8fee842a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.992596] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1641.992596] env[63297]: value = "task-1698062" [ 1641.992596] env[63297]: _type = "Task" [ 1641.992596] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.000486] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698062, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.122313] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.122645] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.122842] env[63297]: DEBUG nova.compute.manager [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1642.123944] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4acf45e8-ecef-4eb8-a444-bbfa4d2ca417 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.131368] env[63297]: DEBUG nova.compute.manager [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63297) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1642.133658] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1642.133829] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-433f30ac-27d5-4513-b520-9be7eed7636c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.139866] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1642.139866] env[63297]: value = "task-1698063" [ 1642.139866] env[63297]: _type = "Task" [ 1642.139866] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.148417] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698063, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.230081] env[63297]: DEBUG oslo_concurrency.lockutils [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.260797] env[63297]: DEBUG nova.scheduler.client.report [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1642.333666] env[63297]: DEBUG oslo_vmware.api [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Task: {'id': task-1698061, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.313025} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.333907] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1642.334116] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1642.334329] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1642.334589] env[63297]: INFO nova.compute.manager [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1642.334933] env[63297]: DEBUG oslo.service.loopingcall [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1642.335196] env[63297]: DEBUG nova.compute.manager [-] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1642.335305] env[63297]: DEBUG nova.network.neutron [-] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1642.346014] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.366667] env[63297]: DEBUG nova.compute.manager [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1642.388525] env[63297]: DEBUG nova.virt.hardware [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1642.388841] env[63297]: DEBUG nova.virt.hardware [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1642.389054] env[63297]: DEBUG nova.virt.hardware [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1642.389287] env[63297]: DEBUG nova.virt.hardware [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1642.389468] env[63297]: DEBUG nova.virt.hardware [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1642.389650] env[63297]: DEBUG nova.virt.hardware [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1642.389915] env[63297]: DEBUG nova.virt.hardware [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1642.390126] env[63297]: DEBUG nova.virt.hardware [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1642.390337] env[63297]: DEBUG nova.virt.hardware [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1642.390537] env[63297]: DEBUG nova.virt.hardware [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1642.390772] env[63297]: DEBUG nova.virt.hardware [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1642.391681] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4da2519-7a76-4f39-a4f7-541e6ab73261 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.397929] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d5420f72-add8-46f5-98de-3ec5864e8b55 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.560s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.402814] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77972a56-1935-4d6e-a3ae-5c97bd3ee811 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.502621] env[63297]: DEBUG oslo_vmware.api [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698062, 'name': PowerOnVM_Task, 'duration_secs': 0.498437} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.502968] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1642.503196] env[63297]: INFO nova.compute.manager [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Took 4.38 seconds to spawn the instance on the hypervisor. [ 1642.503380] env[63297]: DEBUG nova.compute.manager [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1642.504170] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dfb642e-3c79-4362-84f5-1e8f92fc66d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.655743] env[63297]: DEBUG oslo_vmware.api [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698063, 'name': PowerOffVM_Task, 'duration_secs': 0.25161} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.656048] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1642.656238] env[63297]: DEBUG nova.compute.manager [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1642.657067] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d893529-399e-4353-83c4-cc67c05243b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.769493] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.771119] env[63297]: DEBUG nova.compute.manager [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1642.775123] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1642.775413] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df562cc5-7299-4064-b226-a00f2d02d5c7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.788456] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1642.788456] env[63297]: value = "task-1698064" [ 1642.788456] env[63297]: _type = "Task" [ 1642.788456] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.801309] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698064, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.871452] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d2d804-5cb4-4215-baa3-259663db5431 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.898392] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058d1390-2011-4c99-94f3-4d4392478930 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.907459] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance 'a6d86e78-ae24-4e70-9fb2-270177b40322' progress to 83 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1642.930815] env[63297]: DEBUG nova.compute.manager [req-f9d94140-5595-4099-82e3-e4848d747ab2 req-b9fd9eb2-c4d5-475e-8a0c-eda3c71035e5 service nova] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Received event network-vif-deleted-dcb70072-f858-420c-861b-1f6f17a1615b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1642.930943] env[63297]: INFO nova.compute.manager [req-f9d94140-5595-4099-82e3-e4848d747ab2 req-b9fd9eb2-c4d5-475e-8a0c-eda3c71035e5 service nova] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Neutron deleted interface dcb70072-f858-420c-861b-1f6f17a1615b; detaching it from the instance and deleting it from the info cache [ 1642.931152] env[63297]: DEBUG nova.network.neutron [req-f9d94140-5595-4099-82e3-e4848d747ab2 req-b9fd9eb2-c4d5-475e-8a0c-eda3c71035e5 service nova] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1643.026319] env[63297]: INFO nova.compute.manager [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Took 10.61 seconds to build instance. [ 1643.120247] env[63297]: DEBUG nova.compute.manager [req-902929e2-f868-4677-ac27-fbd03ceb2c5f req-573eada2-e276-4adc-bb3c-c6f14abb6583 service nova] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Received event network-vif-plugged-08bfccea-a6c3-4e93-b78f-ae706f8a8469 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1643.120473] env[63297]: DEBUG oslo_concurrency.lockutils [req-902929e2-f868-4677-ac27-fbd03ceb2c5f req-573eada2-e276-4adc-bb3c-c6f14abb6583 service nova] Acquiring lock "c2362520-ed07-4124-aade-bb54830b0d54-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.120679] env[63297]: DEBUG oslo_concurrency.lockutils [req-902929e2-f868-4677-ac27-fbd03ceb2c5f req-573eada2-e276-4adc-bb3c-c6f14abb6583 service nova] Lock "c2362520-ed07-4124-aade-bb54830b0d54-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.121089] env[63297]: DEBUG oslo_concurrency.lockutils [req-902929e2-f868-4677-ac27-fbd03ceb2c5f req-573eada2-e276-4adc-bb3c-c6f14abb6583 service nova] Lock "c2362520-ed07-4124-aade-bb54830b0d54-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.121427] env[63297]: DEBUG nova.compute.manager [req-902929e2-f868-4677-ac27-fbd03ceb2c5f req-573eada2-e276-4adc-bb3c-c6f14abb6583 service nova] [instance: c2362520-ed07-4124-aade-bb54830b0d54] No waiting events found dispatching network-vif-plugged-08bfccea-a6c3-4e93-b78f-ae706f8a8469 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1643.122751] env[63297]: WARNING nova.compute.manager [req-902929e2-f868-4677-ac27-fbd03ceb2c5f req-573eada2-e276-4adc-bb3c-c6f14abb6583 service nova] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Received unexpected event network-vif-plugged-08bfccea-a6c3-4e93-b78f-ae706f8a8469 for instance with vm_state building and task_state spawning. 
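Note on the recurring lockutils pairs above and below ("Acquiring lock ... by ...", "Lock ... acquired ... waited", "Lock ... released ... held"): they are emitted by oslo.concurrency, the :402/:407/:421 lines by the synchronized decorator's inner wrapper and the :310/:313/:331 lines by the lock() context manager. A minimal sketch of that pattern follows; the lock names and function body are illustrative only, not code from this deployment.

# Illustrative sketch of the locking pattern visible in the DEBUG lines above.
# Names and bodies are examples; nova wraps oslo.concurrency in its own helpers.
from oslo_concurrency import lockutils

@lockutils.synchronized('1110d6ca-ca5f-44d1-baca-c22c8fc166b5')  # per-instance lock name (example)
def do_stop_instance():
    # Power off the VM and persist the new power_state while holding the lock.
    pass

# The context-manager form produces the "Acquiring/Acquired/Releasing lock" lines
# seen around refresh_cache-* and compute_resources operations.
with lockutils.lock('compute_resources'):
    pass  # claim or update resource usage under the lock

The "waited N.NNNs" and "held N.NNNs" figures in the log (for example "held 1.049s", "waited 0.292s") are measured by the same wrapper around the decorated call, so they directly reflect contention on these named locks.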
[ 1643.172218] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.049s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.191497] env[63297]: DEBUG nova.network.neutron [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Successfully updated port: 08bfccea-a6c3-4e93-b78f-ae706f8a8469 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1643.279789] env[63297]: DEBUG nova.compute.utils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1643.281281] env[63297]: DEBUG nova.compute.manager [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1643.281439] env[63297]: DEBUG nova.network.neutron [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1643.299276] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698064, 'name': PowerOffVM_Task, 'duration_secs': 0.229754} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.300363] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1643.300363] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c468f317-5d90-4f40-8196-de69f033d4f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.322026] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2554eb8b-975a-41c6-a623-537f91bcfed2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.331905] env[63297]: DEBUG nova.policy [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20a91144677b4efba8ab91acd53d1c04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c33733e0599840618625ecb3e6bb6029', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1643.351803] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1643.351803] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba3b9a1e-b323-4376-b7df-b2866928f605 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.360017] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1643.360017] env[63297]: value = "task-1698065" [ 1643.360017] env[63297]: _type = "Task" [ 1643.360017] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.367170] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698065, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.380017] env[63297]: DEBUG nova.network.neutron [-] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.422648] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1643.422648] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29ae8aa7-91a6-4a81-a0c5-0d3fc6fe12fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.430286] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1643.430286] env[63297]: value = "task-1698066" [ 1643.430286] env[63297]: _type = "Task" [ 1643.430286] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.436987] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4d3d72d6-eae0-4a79-85e8-b56488521537 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.442633] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698066, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.446873] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96bb9796-5a25-43aa-a104-7835f4b007ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.486073] env[63297]: DEBUG nova.compute.manager [req-f9d94140-5595-4099-82e3-e4848d747ab2 req-b9fd9eb2-c4d5-475e-8a0c-eda3c71035e5 service nova] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Detach interface failed, port_id=dcb70072-f858-420c-861b-1f6f17a1615b, reason: Instance 617544f0-fa53-415d-9f00-c8143e8e25b1 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1643.528484] env[63297]: DEBUG oslo_concurrency.lockutils [None req-abc468f4-d00f-449f-bd70-94453c721828 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lock "d70bfe65-5faa-4248-9119-9a38259cb418" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.115s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.629287] env[63297]: DEBUG nova.network.neutron [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Successfully created port: 86ae06d7-2df8-4f63-9929-672841dfcec2 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1643.683875] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.683875] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.683875] env[63297]: DEBUG nova.objects.instance [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63297) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1643.695618] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "refresh_cache-c2362520-ed07-4124-aade-bb54830b0d54" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.695618] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "refresh_cache-c2362520-ed07-4124-aade-bb54830b0d54" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.695618] env[63297]: DEBUG nova.network.neutron [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1643.785767] env[63297]: DEBUG nova.compute.manager [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 
c89d23b7-deb0-4394-9a42-2ac3990da98d] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1643.870670] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1643.870913] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1643.871200] env[63297]: DEBUG oslo_concurrency.lockutils [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.871459] env[63297]: DEBUG oslo_concurrency.lockutils [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.871589] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1643.872032] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-793082c9-c543-4559-84b2-77861656d6d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.883797] env[63297]: INFO nova.compute.manager [-] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Took 1.55 seconds to deallocate network for instance. [ 1643.884283] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1643.884529] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1643.885428] env[63297]: INFO nova.compute.manager [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Rebuilding instance [ 1643.889517] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cd7f3fd-5e03-4576-a1e4-10fc0838697d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.898408] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1643.898408] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520c2ac2-b152-1096-cbcd-773a6f502ea7" [ 1643.898408] env[63297]: _type = "Task" [ 1643.898408] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.907961] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520c2ac2-b152-1096-cbcd-773a6f502ea7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.942207] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698066, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.951610] env[63297]: DEBUG nova.compute.manager [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1643.952466] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73699dd-14ea-4f3e-b7d8-30980c2b6779 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.131378] env[63297]: DEBUG nova.compute.manager [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Stashing vm_state: stopped {{(pid=63297) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1644.241492] env[63297]: DEBUG nova.network.neutron [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1644.402019] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.410846] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520c2ac2-b152-1096-cbcd-773a6f502ea7, 'name': SearchDatastore_Task, 'duration_secs': 0.013006} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.411701] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e895efc-8ed8-4779-bf3c-7f50c1102828 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.417870] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1644.417870] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5214404e-c390-6aa2-c3a4-bdc22599d70c" [ 1644.417870] env[63297]: _type = "Task" [ 1644.417870] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.426544] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5214404e-c390-6aa2-c3a4-bdc22599d70c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.439374] env[63297]: DEBUG oslo_vmware.api [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698066, 'name': PowerOnVM_Task, 'duration_secs': 0.593645} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.440041] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1644.440297] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9897e71-9ccb-4fc2-b7ee-def25bf88245 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance 'a6d86e78-ae24-4e70-9fb2-270177b40322' progress to 100 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1644.463883] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1644.464506] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01a538f5-26c5-4674-9930-7249bae32d73 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.471756] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1644.471756] env[63297]: value = "task-1698067" [ 1644.471756] env[63297]: _type = "Task" [ 1644.471756] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.482581] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698067, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.518212] env[63297]: DEBUG nova.network.neutron [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Updating instance_info_cache with network_info: [{"id": "08bfccea-a6c3-4e93-b78f-ae706f8a8469", "address": "fa:16:3e:35:78:74", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08bfccea-a6", "ovs_interfaceid": "08bfccea-a6c3-4e93-b78f-ae706f8a8469", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.519511] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.519746] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.519953] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.520197] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.520418] env[63297]: DEBUG 
oslo_concurrency.lockutils [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.524227] env[63297]: INFO nova.compute.manager [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Terminating instance [ 1644.525137] env[63297]: DEBUG nova.compute.manager [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1644.525467] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1644.528640] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8a72da-570a-4879-84d4-a6d6b2070ddd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.535608] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1644.535946] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-497aeb1f-564f-4fba-8d7b-10e0bee26825 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.650357] env[63297]: DEBUG oslo_concurrency.lockutils [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.690457] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9d98a528-c8c5-4f57-a798-65c050d08ef6 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.691724] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.292s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.692547] env[63297]: DEBUG nova.objects.instance [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lazy-loading 'resources' on Instance uuid 617544f0-fa53-415d-9f00-c8143e8e25b1 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1644.800368] env[63297]: DEBUG nova.compute.manager [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1644.828707] env[63297]: DEBUG nova.virt.hardware [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1644.829173] env[63297]: DEBUG nova.virt.hardware [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1644.829465] env[63297]: DEBUG nova.virt.hardware [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1644.830095] env[63297]: DEBUG nova.virt.hardware [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1644.830095] env[63297]: DEBUG nova.virt.hardware [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1644.830285] env[63297]: DEBUG nova.virt.hardware [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1644.830657] env[63297]: DEBUG nova.virt.hardware [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 
tempest-ServersTestJSON-1972465365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1644.831025] env[63297]: DEBUG nova.virt.hardware [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1644.831387] env[63297]: DEBUG nova.virt.hardware [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1644.831723] env[63297]: DEBUG nova.virt.hardware [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1644.832019] env[63297]: DEBUG nova.virt.hardware [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1644.833060] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa88d7e-f27d-4f20-8f87-3594166c4574 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.841668] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6d39df-ca5a-45c2-982d-1908bc8f3ad8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.928562] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5214404e-c390-6aa2-c3a4-bdc22599d70c, 'name': SearchDatastore_Task, 'duration_secs': 0.020952} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.928879] env[63297]: DEBUG oslo_concurrency.lockutils [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.929235] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 1d8c6df5-069f-4647-a2f6-e69a4bf8be94/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk. 
{{(pid=63297) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1644.929536] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e07bab5-b7f8-4a15-8964-ba118bce1e68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.937887] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1644.937887] env[63297]: value = "task-1698069" [ 1644.937887] env[63297]: _type = "Task" [ 1644.937887] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.948263] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698069, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.982964] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698067, 'name': PowerOffVM_Task, 'duration_secs': 0.241957} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.983347] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1644.984148] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1644.984657] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d34830-4194-4b53-8a6a-8556f5d4c6fb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.992369] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1644.992628] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8e9a98f-6c3e-46ac-b44f-5eff2dca62c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.024285] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "refresh_cache-c2362520-ed07-4124-aade-bb54830b0d54" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1645.024285] env[63297]: DEBUG nova.compute.manager [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Instance network_info: |[{"id": "08bfccea-a6c3-4e93-b78f-ae706f8a8469", "address": "fa:16:3e:35:78:74", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08bfccea-a6", "ovs_interfaceid": "08bfccea-a6c3-4e93-b78f-ae706f8a8469", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1645.024665] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:78:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '22390021-1742-415d-b442-811550d09927', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08bfccea-a6c3-4e93-b78f-ae706f8a8469', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1645.032910] env[63297]: DEBUG oslo.service.loopingcall [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1645.033772] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1645.034063] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1645.034254] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1645.034430] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Deleting the datastore file [datastore1] d70bfe65-5faa-4248-9119-9a38259cb418 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1645.034674] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b1199d7-33c8-48bb-9f6c-3d5e71db9bd3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.051931] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd2490c4-8e17-4536-ab5d-2070946fc6ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.059543] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1645.059543] env[63297]: value = "task-1698071" [ 1645.059543] env[63297]: _type = "Task" [ 1645.059543] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.064330] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1645.064330] env[63297]: value = "task-1698072" [ 1645.064330] env[63297]: _type = "Task" [ 1645.064330] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.070380] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698071, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.075466] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698072, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.153772] env[63297]: DEBUG nova.compute.manager [req-1b9c536a-1866-4de1-addf-52d748d5b2e7 req-8dc8af68-f012-427d-9bd1-3dbc6080005b service nova] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Received event network-changed-08bfccea-a6c3-4e93-b78f-ae706f8a8469 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1645.154027] env[63297]: DEBUG nova.compute.manager [req-1b9c536a-1866-4de1-addf-52d748d5b2e7 req-8dc8af68-f012-427d-9bd1-3dbc6080005b service nova] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Refreshing instance network info cache due to event network-changed-08bfccea-a6c3-4e93-b78f-ae706f8a8469. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1645.154214] env[63297]: DEBUG oslo_concurrency.lockutils [req-1b9c536a-1866-4de1-addf-52d748d5b2e7 req-8dc8af68-f012-427d-9bd1-3dbc6080005b service nova] Acquiring lock "refresh_cache-c2362520-ed07-4124-aade-bb54830b0d54" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.154358] env[63297]: DEBUG oslo_concurrency.lockutils [req-1b9c536a-1866-4de1-addf-52d748d5b2e7 req-8dc8af68-f012-427d-9bd1-3dbc6080005b service nova] Acquired lock "refresh_cache-c2362520-ed07-4124-aade-bb54830b0d54" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1645.154745] env[63297]: DEBUG nova.network.neutron [req-1b9c536a-1866-4de1-addf-52d748d5b2e7 req-8dc8af68-f012-427d-9bd1-3dbc6080005b service nova] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Refreshing network info cache for port 08bfccea-a6c3-4e93-b78f-ae706f8a8469 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1645.427055] env[63297]: DEBUG nova.network.neutron [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Successfully updated port: 86ae06d7-2df8-4f63-9929-672841dfcec2 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1645.453917] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698069, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.547054] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d0aea6-31f9-4cd4-a7d6-4e14dccfdf72 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.556023] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e76907-9571-487e-b9ed-d5b848168a3e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.594672] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55642a24-db2d-4d29-b33a-5fd257017238 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.600243] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698071, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.605787] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698072, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115167} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.607461] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1645.607658] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1645.607831] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1645.611380] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e64df2-83f8-4823-8cbd-271ed29b8a6c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.625031] env[63297]: DEBUG nova.compute.provider_tree [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1645.698562] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1645.698888] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1645.699151] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleting the datastore file [datastore1] 1110d6ca-ca5f-44d1-baca-c22c8fc166b5 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1645.699434] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7dd85477-70a7-4d29-89c6-c7642c5b4373 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.706524] env[63297]: DEBUG oslo_vmware.api [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1645.706524] env[63297]: value = "task-1698073" [ 1645.706524] env[63297]: _type = "Task" [ 1645.706524] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.715242] env[63297]: DEBUG oslo_vmware.api [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698073, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.859440] env[63297]: DEBUG nova.network.neutron [req-1b9c536a-1866-4de1-addf-52d748d5b2e7 req-8dc8af68-f012-427d-9bd1-3dbc6080005b service nova] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Updated VIF entry in instance network info cache for port 08bfccea-a6c3-4e93-b78f-ae706f8a8469. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1645.859809] env[63297]: DEBUG nova.network.neutron [req-1b9c536a-1866-4de1-addf-52d748d5b2e7 req-8dc8af68-f012-427d-9bd1-3dbc6080005b service nova] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Updating instance_info_cache with network_info: [{"id": "08bfccea-a6c3-4e93-b78f-ae706f8a8469", "address": "fa:16:3e:35:78:74", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08bfccea-a6", "ovs_interfaceid": "08bfccea-a6c3-4e93-b78f-ae706f8a8469", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1645.927950] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "refresh_cache-c89d23b7-deb0-4394-9a42-2ac3990da98d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.928109] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "refresh_cache-c89d23b7-deb0-4394-9a42-2ac3990da98d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1645.928258] env[63297]: DEBUG nova.network.neutron [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1645.952787] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698069, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.610256} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.953062] env[63297]: INFO nova.virt.vmwareapi.ds_util [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 1d8c6df5-069f-4647-a2f6-e69a4bf8be94/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk. [ 1645.953826] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f90b4a-a332-4f57-b898-78f03a8c9d64 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.986330] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 1d8c6df5-069f-4647-a2f6-e69a4bf8be94/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1645.988192] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94018ab7-99c1-4219-aeb7-aebf8aedb934 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.007689] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1646.007689] env[63297]: value = "task-1698074" [ 1646.007689] env[63297]: _type = "Task" [ 1646.007689] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.020999] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698074, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.075055] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698071, 'name': CreateVM_Task, 'duration_secs': 0.746024} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.075445] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1646.076578] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.076947] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.077505] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1646.077900] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7dc2de0-ee27-4d5b-bd66-fb0733f35c45 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.084492] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1646.084492] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d2e5ad-a462-6f28-cd89-e43eb9c71951" [ 1646.084492] env[63297]: _type = "Task" [ 1646.084492] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.093179] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d2e5ad-a462-6f28-cd89-e43eb9c71951, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.150459] env[63297]: ERROR nova.scheduler.client.report [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] [req-285604df-2fb4-48a6-bc55-6fddd80a61a6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-285604df-2fb4-48a6-bc55-6fddd80a61a6"}]} [ 1646.166204] env[63297]: DEBUG nova.scheduler.client.report [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1646.181146] env[63297]: DEBUG nova.scheduler.client.report [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1646.181421] env[63297]: DEBUG nova.compute.provider_tree [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1646.193954] env[63297]: DEBUG nova.scheduler.client.report [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1646.213306] env[63297]: DEBUG nova.scheduler.client.report [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1646.219111] env[63297]: DEBUG oslo_vmware.api [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698073, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.343573} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.219111] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1646.219272] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1646.219434] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1646.219642] env[63297]: INFO nova.compute.manager [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Took 1.69 seconds to destroy the instance on the hypervisor. [ 1646.219886] env[63297]: DEBUG oslo.service.loopingcall [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1646.220423] env[63297]: DEBUG nova.compute.manager [-] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1646.220524] env[63297]: DEBUG nova.network.neutron [-] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1646.366523] env[63297]: DEBUG oslo_concurrency.lockutils [req-1b9c536a-1866-4de1-addf-52d748d5b2e7 req-8dc8af68-f012-427d-9bd1-3dbc6080005b service nova] Releasing lock "refresh_cache-c2362520-ed07-4124-aade-bb54830b0d54" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.483215] env[63297]: DEBUG nova.network.neutron [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1646.496174] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678002f1-40fd-4ab3-9c98-e083abade7ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.504676] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5d63eb-c13d-4dd9-811e-b477a3a10b3d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.537695] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3448c3b2-78e2-4351-b332-6b825e8b3c51 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.545501] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698074, 'name': ReconfigVM_Task, 'duration_secs': 0.434962} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.548136] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 1d8c6df5-069f-4647-a2f6-e69a4bf8be94/41f1ad71-37f2-4e86-a900-da4965eba44f-rescue.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1646.548737] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11d6e8f-9f92-4196-8bb2-aa28279bafef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.552163] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b7d631-542c-4a95-9a26-cd163703fbad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.568566] env[63297]: DEBUG nova.compute.provider_tree [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1646.601880] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8178ef6e-29bc-4140-a123-aeaa651b3b19 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.620924] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d2e5ad-a462-6f28-cd89-e43eb9c71951, 'name': SearchDatastore_Task, 'duration_secs': 0.011151} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.624967] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.625172] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1646.625422] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.625570] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.625748] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1646.626097] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1646.626097] env[63297]: value = "task-1698075" [ 1646.626097] env[63297]: _type = "Task" [ 1646.626097] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.628799] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b7541a2-4992-4b72-8cca-32d4b8a452db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.641232] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698075, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.645541] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1646.645809] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1646.646581] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef985924-8a3e-4ce9-905e-db758f3f002e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.651684] env[63297]: DEBUG nova.virt.hardware [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1646.651927] env[63297]: DEBUG nova.virt.hardware [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1646.652113] env[63297]: DEBUG nova.virt.hardware [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1646.652339] env[63297]: DEBUG nova.virt.hardware [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1646.652535] env[63297]: DEBUG nova.virt.hardware [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1646.652690] env[63297]: DEBUG nova.virt.hardware [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1646.652902] env[63297]: DEBUG nova.virt.hardware [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1646.653079] env[63297]: DEBUG nova.virt.hardware [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1646.653266] env[63297]: DEBUG nova.virt.hardware [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1646.653428] env[63297]: DEBUG nova.virt.hardware [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1646.653598] env[63297]: DEBUG nova.virt.hardware [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1646.654669] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02e8bd1-f27c-4f95-af03-154f30aff9e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.663456] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbced68-e0f3-47ab-ac2a-270bf235446e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.667930] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1646.667930] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522349b5-8500-8a9c-de84-73148bcf8133" [ 1646.667930] env[63297]: _type = "Task" [ 1646.667930] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.680275] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Instance VIF info [] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1646.686207] env[63297]: DEBUG oslo.service.loopingcall [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1646.688639] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1646.688779] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c6dd5eb-d013-4ea6-9d9d-10c2aaaac82d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.704228] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522349b5-8500-8a9c-de84-73148bcf8133, 'name': SearchDatastore_Task, 'duration_secs': 0.022019} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.705320] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfe3a0dd-8774-431f-ac7f-24ae4b449f71 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.709584] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1646.709584] env[63297]: value = "task-1698076" [ 1646.709584] env[63297]: _type = "Task" [ 1646.709584] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.710799] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1646.710799] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a58979-1a76-6b69-6380-3df31b8e80e4" [ 1646.710799] env[63297]: _type = "Task" [ 1646.710799] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.723945] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698076, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.728439] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a58979-1a76-6b69-6380-3df31b8e80e4, 'name': SearchDatastore_Task} progress is 0%. 
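The wait_for_task / _poll_task lines above show the general shape of task handling against vCenter: submit a task, then poll it on an interval until it reports success, recording the duration. A rough stand-in for that loop (get_task_info is a placeholder callable, not the oslo.vmware API):

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # poll until the task reports success or error, as in the
        # "progress is 0% ... completed successfully" sequence above
        start = time.monotonic()
        while True:
            info = get_task_info()
            if info["state"] == "success":
                info["duration_secs"] = round(time.monotonic() - start, 6)
                return info
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            time.sleep(poll_interval)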
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.737415] env[63297]: DEBUG nova.network.neutron [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Updating instance_info_cache with network_info: [{"id": "86ae06d7-2df8-4f63-9929-672841dfcec2", "address": "fa:16:3e:6a:05:48", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86ae06d7-2d", "ovs_interfaceid": "86ae06d7-2df8-4f63-9929-672841dfcec2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.099151] env[63297]: DEBUG nova.scheduler.client.report [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1647.115488] env[63297]: DEBUG nova.network.neutron [-] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.140470] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698075, 'name': ReconfigVM_Task, 'duration_secs': 0.181497} completed successfully. 
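The network_info blobs above are lists of VIF dicts. A small illustrative helper for pulling out the fields usually needed downstream (port id, MAC, fixed IPs, NSX segmentation id); the keys are exactly those visible in the cache entry above:

    def summarize_vif(vif):
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        return {
            "port_id": vif["id"],                                      # 86ae06d7-2df8-...
            "mac": vif["address"],                                     # fa:16:3e:6a:05:48
            "ips": ips,                                                # ['192.168.128.11']
            "segmentation_id": vif["details"].get("segmentation_id"),  # 30
            "devname": vif.get("devname"),                             # tap86ae06d7-2d
        }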
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.140825] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1647.141160] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9f0db8f-7d96-46b4-b00e-34fec089d39f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.148094] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1647.148094] env[63297]: value = "task-1698077" [ 1647.148094] env[63297]: _type = "Task" [ 1647.148094] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.155699] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698077, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.184222] env[63297]: DEBUG nova.compute.manager [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Received event network-vif-plugged-86ae06d7-2df8-4f63-9929-672841dfcec2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1647.184409] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] Acquiring lock "c89d23b7-deb0-4394-9a42-2ac3990da98d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.184703] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] Lock "c89d23b7-deb0-4394-9a42-2ac3990da98d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.184959] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] Lock "c89d23b7-deb0-4394-9a42-2ac3990da98d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.185196] env[63297]: DEBUG nova.compute.manager [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] No waiting events found dispatching network-vif-plugged-86ae06d7-2df8-4f63-9929-672841dfcec2 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1647.185454] 
env[63297]: WARNING nova.compute.manager [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Received unexpected event network-vif-plugged-86ae06d7-2df8-4f63-9929-672841dfcec2 for instance with vm_state building and task_state spawning. [ 1647.185645] env[63297]: DEBUG nova.compute.manager [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Received event network-changed-86ae06d7-2df8-4f63-9929-672841dfcec2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1647.185833] env[63297]: DEBUG nova.compute.manager [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Refreshing instance network info cache due to event network-changed-86ae06d7-2df8-4f63-9929-672841dfcec2. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1647.186039] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] Acquiring lock "refresh_cache-c89d23b7-deb0-4394-9a42-2ac3990da98d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.223050] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698076, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.226594] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a58979-1a76-6b69-6380-3df31b8e80e4, 'name': SearchDatastore_Task, 'duration_secs': 0.01789} completed successfully. 
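The event lines above (pop_instance_event, "No waiting events found", the WARNING about an unexpected network-vif-plugged) reflect a simple hand-off: a spawner may register a waiter for a named event, and the neutron-driven external event pops and signals it; with nothing registered, the event is only logged. A simplified sketch (class and method names here are illustrative, not Nova's):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = {}                      # event name -> threading.Event

        def prepare_for_event(self, name):
            ev = threading.Event()
            self._waiters[name] = ev
            return ev                               # spawner later calls ev.wait()

        def pop_instance_event(self, name):
            return self._waiters.pop(name, None)

    events = InstanceEvents()
    waiter = events.pop_instance_event("network-vif-plugged-86ae06d7-2df8-4f63-9929-672841dfcec2")
    if waiter is None:
        print("Received unexpected event network-vif-plugged-... (no waiter registered)")
    else:
        waiter.set()                                # unblocks the spawning thread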
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.226854] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.227119] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c2362520-ed07-4124-aade-bb54830b0d54/c2362520-ed07-4124-aade-bb54830b0d54.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1647.227372] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc690be2-7909-45a1-ad54-b6a43b48e12e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.233587] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1647.233587] env[63297]: value = "task-1698078" [ 1647.233587] env[63297]: _type = "Task" [ 1647.233587] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.243554] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "refresh_cache-c89d23b7-deb0-4394-9a42-2ac3990da98d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.243894] env[63297]: DEBUG nova.compute.manager [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Instance network_info: |[{"id": "86ae06d7-2df8-4f63-9929-672841dfcec2", "address": "fa:16:3e:6a:05:48", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86ae06d7-2d", "ovs_interfaceid": "86ae06d7-2df8-4f63-9929-672841dfcec2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1647.244210] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698078, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.244460] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] Acquired lock "refresh_cache-c89d23b7-deb0-4394-9a42-2ac3990da98d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.244651] env[63297]: DEBUG nova.network.neutron [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Refreshing network info cache for port 86ae06d7-2df8-4f63-9929-672841dfcec2 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1647.245982] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:05:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '86ae06d7-2df8-4f63-9929-672841dfcec2', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1647.254522] env[63297]: DEBUG oslo.service.loopingcall [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1647.258276] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1647.258906] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e03a12e7-3195-4649-8e60-142cb27bfee5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.276716] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "a6d86e78-ae24-4e70-9fb2-270177b40322" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.277529] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a6d86e78-ae24-4e70-9fb2-270177b40322" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.277529] env[63297]: DEBUG nova.compute.manager [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Going to confirm migration 3 {{(pid=63297) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1647.284933] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1647.284933] env[63297]: value = "task-1698079" [ 1647.284933] env[63297]: _type = "Task" [ 1647.284933] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.294521] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698079, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.582754] env[63297]: DEBUG nova.network.neutron [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Updated VIF entry in instance network info cache for port 86ae06d7-2df8-4f63-9929-672841dfcec2. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1647.583354] env[63297]: DEBUG nova.network.neutron [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Updating instance_info_cache with network_info: [{"id": "86ae06d7-2df8-4f63-9929-672841dfcec2", "address": "fa:16:3e:6a:05:48", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86ae06d7-2d", "ovs_interfaceid": "86ae06d7-2df8-4f63-9929-672841dfcec2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.604169] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.912s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.607764] env[63297]: DEBUG oslo_concurrency.lockutils [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.957s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.618282] env[63297]: INFO nova.compute.manager [-] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Took 1.40 seconds to deallocate network for instance. [ 1647.645091] env[63297]: INFO nova.scheduler.client.report [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Deleted allocations for instance 617544f0-fa53-415d-9f00-c8143e8e25b1 [ 1647.663463] env[63297]: DEBUG oslo_vmware.api [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698077, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.723470] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698076, 'name': CreateVM_Task, 'duration_secs': 0.675138} completed successfully. 
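The lockutils lines throughout this section report how long each caller waited for a lock and how long it held it (e.g. "waited 2.957s" and "held 2.912s" just above for "compute_resources"). A simplified local equivalent of that accounting; this is a plain threading lock with timing around it, not oslo.concurrency itself:

    import contextlib
    import threading
    import time

    _locks = {}

    @contextlib.contextmanager
    def timed_lock(name):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        acquired_at = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" released :: held {time.monotonic() - acquired_at:.3f}s')

    with timed_lock("compute_resources"):
        pass        # claim / update_usage work would happen here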
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.724437] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1647.724864] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.725038] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.725369] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1647.725856] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebc3514b-eea4-41fd-aacb-92a14661dd9e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.731140] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1647.731140] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52257f6c-2fd6-cac1-b27b-f4fbca8fa7e2" [ 1647.731140] env[63297]: _type = "Task" [ 1647.731140] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.745349] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52257f6c-2fd6-cac1-b27b-f4fbca8fa7e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.748592] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698078, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501563} completed successfully. 
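The CopyVirtualDisk_Task that just completed copies the cached image vmdk into the instance's own directory on the same datastore. The path convention is visible in the log; a tiny sketch of just the path construction (the copy itself is done server-side by vCenter):

    def cached_image_path(datastore, image_id, cache_dir="devstack-image-cache_base"):
        return f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"

    def instance_disk_path(datastore, instance_uuid):
        return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    src = cached_image_path("datastore1", "41f1ad71-37f2-4e86-a900-da4965eba44f")
    dst = instance_disk_path("datastore1", "c2362520-ed07-4124-aade-bb54830b0d54")
    # src and dst match the source and destination logged for task-1698078 above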
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.748838] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c2362520-ed07-4124-aade-bb54830b0d54/c2362520-ed07-4124-aade-bb54830b0d54.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1647.749072] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1647.749320] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-acd894cf-3641-4e5d-af4e-7a8d68b67531 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.755372] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1647.755372] env[63297]: value = "task-1698080" [ 1647.755372] env[63297]: _type = "Task" [ 1647.755372] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.763812] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698080, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.795883] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698079, 'name': CreateVM_Task} progress is 25%. 
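"Extending root virtual disk to 1048576" expresses the target size as a bare number; reading it as KiB (so 1048576 = a 1 GiB root disk) is an assumption based on the value itself, but it matches the usual GB-to-KiB conversion:

    def root_gb_to_kib(root_gb):
        # 1 GiB = 1024 * 1024 KiB
        return root_gb * 1024 * 1024

    assert root_gb_to_kib(1) == 1048576   # the value in the ExtendVirtualDisk log line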
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.817782] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.818010] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.818213] env[63297]: DEBUG nova.network.neutron [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1647.818417] env[63297]: DEBUG nova.objects.instance [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lazy-loading 'info_cache' on Instance uuid a6d86e78-ae24-4e70-9fb2-270177b40322 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1648.087555] env[63297]: DEBUG oslo_concurrency.lockutils [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] Releasing lock "refresh_cache-c89d23b7-deb0-4394-9a42-2ac3990da98d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.087895] env[63297]: DEBUG nova.compute.manager [req-b6898058-34bb-48ec-8cea-7ce77f2071cd req-0e651d8c-ae0a-4155-aadf-41cfed02720d service nova] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Received event network-vif-deleted-67f99216-a730-4066-be67-21fcb4979776 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1648.116415] env[63297]: INFO nova.compute.claims [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1648.126489] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1648.156223] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2ad701b-6ef2-41ea-b3ae-b3a1ddeaebf2 tempest-ImagesTestJSON-256849719 tempest-ImagesTestJSON-256849719-project-member] Lock "617544f0-fa53-415d-9f00-c8143e8e25b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.967s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.162646] env[63297]: DEBUG oslo_vmware.api [None 
req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698077, 'name': PowerOnVM_Task, 'duration_secs': 0.535086} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.162889] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1648.165052] env[63297]: DEBUG nova.compute.manager [None req-91e6f763-862d-46f7-a417-8ab2a0cf204f tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1648.165805] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeda9f74-187e-4e60-9968-7b53bd6217a1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.244106] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52257f6c-2fd6-cac1-b27b-f4fbca8fa7e2, 'name': SearchDatastore_Task, 'duration_secs': 0.013283} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.244501] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.244792] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1648.245105] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1648.245305] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1648.245536] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1648.245846] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b2acd4b-22cc-4912-a1f5-b24956cdd109 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.262105] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1648.262339] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1648.266803] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8e53284-2ec8-4275-9b64-0254692bbaea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.269631] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698080, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068574} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.269931] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1648.271292] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a91473-7e3d-41c3-a9ae-095d157b6f6c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.275846] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1648.275846] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f1b283-f8fb-a5f8-2418-c05ead3cbd86" [ 1648.275846] env[63297]: _type = "Task" [ 1648.275846] env[63297]: } to complete. 
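The "Creating directory ... / Created directory ... / Folder ... created" sequence above is the usual create-if-missing idiom: attempt the mkdir and treat "already exists" as success, so concurrent spawns can all run the step safely. A local-filesystem analogue (the real call goes through vCenter's FileManager):

    import os

    def create_folder_if_missing(path):
        try:
            os.makedirs(path)
            print(f"Created directory with path {path}")
        except FileExistsError:
            print(f"Folder {path} already exists, continuing")

    create_folder_if_missing("/tmp/devstack-image-cache_base")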
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.302058] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] c2362520-ed07-4124-aade-bb54830b0d54/c2362520-ed07-4124-aade-bb54830b0d54.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1648.309341] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9724ab1b-e6a8-455b-a7fe-e670747bfa49 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.327418] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f1b283-f8fb-a5f8-2418-c05ead3cbd86, 'name': SearchDatastore_Task, 'duration_secs': 0.019351} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.327560] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-accf753a-d868-4e30-8332-13b583e3fb41 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.332497] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698079, 'name': CreateVM_Task, 'duration_secs': 0.669484} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.333134] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1648.334565] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1648.334732] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1648.335075] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1648.336740] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1648.336740] env[63297]: value = "task-1698081" [ 1648.336740] env[63297]: _type = "Task" [ 1648.336740] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.337852] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93f62fdf-ec5b-4920-88a6-371221f90b37 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.346154] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1648.346154] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529ecb16-624d-f0f6-2af9-f7b5a3ae59d4" [ 1648.346154] env[63297]: _type = "Task" [ 1648.346154] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.352458] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.352458] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1648.352458] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529fe254-90af-a3db-4396-1d74afe4610d" [ 1648.352458] env[63297]: _type = "Task" [ 1648.352458] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.367517] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529fe254-90af-a3db-4396-1d74afe4610d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.367700] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529ecb16-624d-f0f6-2af9-f7b5a3ae59d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.623476] env[63297]: INFO nova.compute.resource_tracker [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating resource usage from migration a3981554-81b6-4a57-8746-6c355d8fcc2f [ 1648.864365] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.873988] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529ecb16-624d-f0f6-2af9-f7b5a3ae59d4, 'name': SearchDatastore_Task, 'duration_secs': 0.025523} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.877146] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.877450] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d70bfe65-5faa-4248-9119-9a38259cb418/d70bfe65-5faa-4248-9119-9a38259cb418.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1648.877749] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc3e38ea-439e-4eb0-a814-1049458b2f2a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.884158] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529fe254-90af-a3db-4396-1d74afe4610d, 'name': SearchDatastore_Task, 'duration_secs': 0.013897} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.884989] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.885256] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1648.885489] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1648.885637] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1648.885817] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 
tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1648.886146] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34f05c68-9c01-4d3d-b3fb-ec66b07ce2e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.889672] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f2ff54-fe6e-4724-80e7-40754bf893d8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.893361] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1648.893361] env[63297]: value = "task-1698082" [ 1648.893361] env[63297]: _type = "Task" [ 1648.893361] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.899408] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1648.899600] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1648.901464] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3ad806-779b-4cdf-86db-abfc8c96a670 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.904637] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3244604f-47e6-416f-9345-9743d046769a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.909737] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698082, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.913181] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1648.913181] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524008b2-b938-f361-542b-9eb2d02e7549" [ 1648.913181] env[63297]: _type = "Task" [ 1648.913181] env[63297]: } to complete. 
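The recurring pattern around "Processing image 41f1ad71-..." — take the per-image lock, SearchDatastore for the cached vmdk, and only fetch it if absent — keeps parallel spawns of the same image from downloading it twice. A simplified sketch; search_datastore and fetch_image are placeholder callables, not the real vCenter calls:

    import threading

    _image_locks = {}

    def ensure_cached_image(image_id, search_datastore, fetch_image,
                            cache_dir="devstack-image-cache_base"):
        path = f"{cache_dir}/{image_id}/{image_id}.vmdk"
        lock = _image_locks.setdefault(image_id, threading.Lock())
        with lock:                              # per-image lock, as in the log above
            if not search_datastore(path):      # SearchDatastore_Task: cache hit?
                fetch_image(image_id, path)     # miss: download once, reuse after
        return path                             # spawns then copy from this path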
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.944477] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11db43c5-ff73-4943-bd9d-e8ea5302a860 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.956578] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e848e5-cc71-4fb8-9003-22116b5a46c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.960537] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524008b2-b938-f361-542b-9eb2d02e7549, 'name': SearchDatastore_Task, 'duration_secs': 0.03759} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.962204] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45aa4c32-fffa-498b-91a1-f24befdcaf5f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.975480] env[63297]: DEBUG nova.compute.provider_tree [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1648.982009] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1648.982009] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fa7ec5-6f6f-4fb8-1d94-6d46a9425a51" [ 1648.982009] env[63297]: _type = "Task" [ 1648.982009] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.994118] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fa7ec5-6f6f-4fb8-1d94-6d46a9425a51, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.095490] env[63297]: DEBUG nova.network.neutron [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance_info_cache with network_info: [{"id": "e0aae0a1-c525-4233-b876-799c11006f75", "address": "fa:16:3e:f7:ce:b0", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0aae0a1-c5", "ovs_interfaceid": "e0aae0a1-c525-4233-b876-799c11006f75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1649.352037] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698081, 'name': ReconfigVM_Task, 'duration_secs': 1.014911} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.352457] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Reconfigured VM instance instance-00000063 to attach disk [datastore1] c2362520-ed07-4124-aade-bb54830b0d54/c2362520-ed07-4124-aade-bb54830b0d54.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1649.352935] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bc33109-422c-46e9-9512-0f66e68fb806 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.360397] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1649.360397] env[63297]: value = "task-1698083" [ 1649.360397] env[63297]: _type = "Task" [ 1649.360397] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.373849] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698083, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.404796] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698082, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.481286] env[63297]: DEBUG nova.scheduler.client.report [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1649.497682] env[63297]: INFO nova.compute.manager [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Unrescuing [ 1649.497900] env[63297]: DEBUG oslo_concurrency.lockutils [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.498072] env[63297]: DEBUG oslo_concurrency.lockutils [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquired lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.498250] env[63297]: DEBUG nova.network.neutron [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1649.500571] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fa7ec5-6f6f-4fb8-1d94-6d46a9425a51, 'name': SearchDatastore_Task, 'duration_secs': 0.030516} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.502091] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1649.502358] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c89d23b7-deb0-4394-9a42-2ac3990da98d/c89d23b7-deb0-4394-9a42-2ac3990da98d.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1649.502716] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e4be4fb-dd0e-4096-8a7b-9703befc4970 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.511220] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1649.511220] env[63297]: value = "task-1698084" [ 1649.511220] env[63297]: _type = "Task" [ 1649.511220] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.523876] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698084, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.598643] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "refresh_cache-a6d86e78-ae24-4e70-9fb2-270177b40322" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1649.599062] env[63297]: DEBUG nova.objects.instance [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lazy-loading 'migration_context' on Instance uuid a6d86e78-ae24-4e70-9fb2-270177b40322 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1649.873849] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698083, 'name': Rename_Task, 'duration_secs': 0.394016} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.874213] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1649.874494] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e59cb0d-60a6-4485-ad44-d9b9334110a0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.882931] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1649.882931] env[63297]: value = "task-1698085" [ 1649.882931] env[63297]: _type = "Task" [ 1649.882931] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.894936] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698085, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.905060] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698082, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.783454} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.905514] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d70bfe65-5faa-4248-9119-9a38259cb418/d70bfe65-5faa-4248-9119-9a38259cb418.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1649.905878] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1649.906261] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd5f542b-8c9e-4056-b595-60510507acb6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.916169] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1649.916169] env[63297]: value = "task-1698086" [ 1649.916169] env[63297]: _type = "Task" [ 1649.916169] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.928641] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698086, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.993023] env[63297]: DEBUG oslo_concurrency.lockutils [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.382s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.993023] env[63297]: INFO nova.compute.manager [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Migrating [ 1649.999431] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.873s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.000111] env[63297]: DEBUG nova.objects.instance [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lazy-loading 'resources' on Instance uuid 1110d6ca-ca5f-44d1-baca-c22c8fc166b5 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1650.032829] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698084, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.103506] env[63297]: DEBUG nova.objects.base [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1650.104895] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3c9205-d867-43dc-b7f8-ec6c076dadd6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.137323] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a9ede27-07ef-487f-8071-e00fd470fb85 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.143478] env[63297]: DEBUG oslo_vmware.api [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1650.143478] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f6b6d2-1d4b-da33-6219-50b32f6eb6bb" [ 1650.143478] env[63297]: _type = "Task" [ 1650.143478] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.159404] env[63297]: DEBUG oslo_vmware.api [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52f6b6d2-1d4b-da33-6219-50b32f6eb6bb, 'name': SearchDatastore_Task, 'duration_secs': 0.006962} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.159404] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.312770] env[63297]: DEBUG nova.network.neutron [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updating instance_info_cache with network_info: [{"id": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "address": "fa:16:3e:6a:dc:37", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f272d86-33", "ovs_interfaceid": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.327796] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e1f0d1-0b7a-4e5d-afc1-e4adcedb8d34 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.338194] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d47be6b-90f1-4dab-ba1b-28a6dc7ea296 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.371775] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b0820b-31d6-4741-b14f-fe10f007c0b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.379413] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7fdd23-5654-44b0-8c70-619ce8ab5ac0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.396395] env[63297]: DEBUG nova.compute.provider_tree [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating inventory in ProviderTree for provider 
88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1650.402160] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698085, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.426955] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090219} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.427336] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1650.428235] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6274143c-cf22-4a6f-b184-c6794afb812c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.450660] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] d70bfe65-5faa-4248-9119-9a38259cb418/d70bfe65-5faa-4248-9119-9a38259cb418.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1650.451431] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-869e2e1a-52f3-48b9-b290-9fa769813459 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.472194] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1650.472194] env[63297]: value = "task-1698087" [ 1650.472194] env[63297]: _type = "Task" [ 1650.472194] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.480765] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698087, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.522246] env[63297]: DEBUG oslo_concurrency.lockutils [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1650.522436] env[63297]: DEBUG oslo_concurrency.lockutils [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.522612] env[63297]: DEBUG nova.network.neutron [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1650.531830] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698084, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63939} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.532189] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c89d23b7-deb0-4394-9a42-2ac3990da98d/c89d23b7-deb0-4394-9a42-2ac3990da98d.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1650.532460] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1650.532758] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11f4802c-fee0-4974-8189-3c0c020f354a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.540938] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1650.540938] env[63297]: value = "task-1698088" [ 1650.540938] env[63297]: _type = "Task" [ 1650.540938] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.549789] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698088, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.815787] env[63297]: DEBUG oslo_concurrency.lockutils [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Releasing lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.816507] env[63297]: DEBUG nova.objects.instance [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lazy-loading 'flavor' on Instance uuid 1d8c6df5-069f-4647-a2f6-e69a4bf8be94 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1650.894756] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698085, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.933174] env[63297]: DEBUG nova.scheduler.client.report [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 135 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1650.933461] env[63297]: DEBUG nova.compute.provider_tree [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 135 to 136 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1650.933636] env[63297]: DEBUG nova.compute.provider_tree [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1650.985123] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698087, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.052090] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698088, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066124} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.052090] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1651.053324] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ce9703-4571-4693-a5f6-dcbdad8112b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.076145] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] c89d23b7-deb0-4394-9a42-2ac3990da98d/c89d23b7-deb0-4394-9a42-2ac3990da98d.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1651.078776] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dda24d8f-3d01-4372-ab1e-a6f964ddfb4c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.101095] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1651.101095] env[63297]: value = "task-1698089" [ 1651.101095] env[63297]: _type = "Task" [ 1651.101095] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.111891] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698089, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.323483] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d3ce52-e310-4a13-a6bd-8becf0ad9e75 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.348009] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1651.349227] env[63297]: DEBUG nova.network.neutron [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance_info_cache with network_info: [{"id": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "address": "fa:16:3e:23:ee:4c", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c6ab49-f8", "ovs_interfaceid": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1651.350396] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4afcf06e-a34d-403f-b2a2-e745dbb8980f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.357224] env[63297]: DEBUG oslo_vmware.api [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1651.357224] env[63297]: value = "task-1698090" [ 1651.357224] env[63297]: _type = "Task" [ 1651.357224] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.367357] env[63297]: DEBUG oslo_vmware.api [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698090, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.394124] env[63297]: DEBUG oslo_vmware.api [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698085, 'name': PowerOnVM_Task, 'duration_secs': 1.244488} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.394433] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1651.394641] env[63297]: INFO nova.compute.manager [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Took 9.03 seconds to spawn the instance on the hypervisor. [ 1651.394820] env[63297]: DEBUG nova.compute.manager [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1651.395637] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61730d1-0920-4898-af47-24a0ff04b5a2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.439037] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.439s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.440827] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.282s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.466442] env[63297]: INFO nova.scheduler.client.report [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted allocations for instance 1110d6ca-ca5f-44d1-baca-c22c8fc166b5 [ 1651.483409] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698087, 'name': ReconfigVM_Task, 'duration_secs': 0.753903} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.483409] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Reconfigured VM instance instance-00000062 to attach disk [datastore1] d70bfe65-5faa-4248-9119-9a38259cb418/d70bfe65-5faa-4248-9119-9a38259cb418.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1651.483999] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cb70fc97-8d3f-42a5-9a3d-4a6f5c4bc342 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.490228] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1651.490228] env[63297]: value = "task-1698091" [ 1651.490228] env[63297]: _type = "Task" [ 1651.490228] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.500632] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698091, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.612085] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698089, 'name': ReconfigVM_Task, 'duration_secs': 0.457086} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.612085] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Reconfigured VM instance instance-00000064 to attach disk [datastore1] c89d23b7-deb0-4394-9a42-2ac3990da98d/c89d23b7-deb0-4394-9a42-2ac3990da98d.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1651.612593] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-540dcd06-9266-4463-9248-205f96a81c48 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.619805] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1651.619805] env[63297]: value = "task-1698092" [ 1651.619805] env[63297]: _type = "Task" [ 1651.619805] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.628499] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698092, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.853661] env[63297]: DEBUG oslo_concurrency.lockutils [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1651.866687] env[63297]: DEBUG oslo_vmware.api [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698090, 'name': PowerOffVM_Task, 'duration_secs': 0.250911} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.868129] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1651.872734] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Reconfiguring VM instance instance-00000059 to detach disk 2002 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1651.873844] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3a934f4-ebc4-4f25-8cfc-dfecec6702db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.892854] env[63297]: DEBUG oslo_vmware.api [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1651.892854] env[63297]: value = "task-1698093" [ 1651.892854] env[63297]: _type = "Task" [ 1651.892854] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.902437] env[63297]: DEBUG oslo_vmware.api [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698093, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.912022] env[63297]: INFO nova.compute.manager [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Took 15.42 seconds to build instance. 
[ 1651.974740] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c727b8ff-43e2-49bc-ad17-6f453d3bbe60 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "1110d6ca-ca5f-44d1-baca-c22c8fc166b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.455s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.005916] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698091, 'name': Rename_Task, 'duration_secs': 0.157729} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.007339] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1652.010651] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55b9859a-f045-4f97-b720-8deb4430c72f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.022437] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1652.022437] env[63297]: value = "task-1698094" [ 1652.022437] env[63297]: _type = "Task" [ 1652.022437] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.034654] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698094, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.130972] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698092, 'name': Rename_Task, 'duration_secs': 0.216506} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.131288] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1652.131542] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76197ba4-0315-4040-a4f1-01b00a560254 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.143542] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1652.143542] env[63297]: value = "task-1698095" [ 1652.143542] env[63297]: _type = "Task" [ 1652.143542] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.153198] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698095, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.197120] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7c0876-eae6-4666-8897-fc04822c8bef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.206926] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d76421-ce41-43a8-ace3-1d27e2fc6678 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.245606] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0723c345-0d99-46a9-af88-e98385a7c308 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.253948] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04d6c99-c56e-4538-a403-76e5195236b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.268455] env[63297]: DEBUG nova.compute.provider_tree [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1652.404590] env[63297]: DEBUG oslo_vmware.api [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698093, 'name': ReconfigVM_Task, 'duration_secs': 0.295738} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.404865] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Reconfigured VM instance instance-00000059 to detach disk 2002 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1652.408175] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1652.408175] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0c74e9a-d39e-4ecb-a4dd-4e583542cf60 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.415395] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa7a2de9-5402-47e7-9ff9-61f99026744d tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "c2362520-ed07-4124-aade-bb54830b0d54" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.925s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.415395] env[63297]: DEBUG oslo_vmware.api [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1652.415395] env[63297]: value = "task-1698096" [ 1652.415395] env[63297]: _type = "Task" [ 1652.415395] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.423767] env[63297]: DEBUG oslo_vmware.api [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698096, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.522290] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9459622-f200-45ca-afec-9104938596ad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.534131] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698094, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.535669] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-68538d92-f457-4726-a199-d113c2019138 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Suspending the VM {{(pid=63297) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1652.536051] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-98ef0043-3688-4490-83d2-2812d38f73cb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.545018] env[63297]: DEBUG oslo_vmware.api [None req-68538d92-f457-4726-a199-d113c2019138 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1652.545018] env[63297]: value = "task-1698097" [ 1652.545018] env[63297]: _type = "Task" [ 1652.545018] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.550816] env[63297]: DEBUG oslo_vmware.api [None req-68538d92-f457-4726-a199-d113c2019138 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698097, 'name': SuspendVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.653477] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698095, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.773027] env[63297]: DEBUG nova.scheduler.client.report [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1652.930297] env[63297]: DEBUG oslo_vmware.api [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698096, 'name': PowerOnVM_Task, 'duration_secs': 0.389303} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.930847] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1652.931285] env[63297]: DEBUG nova.compute.manager [None req-63b7adca-bbbb-455d-8162-8f2cacd0a1a2 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1652.933219] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2691c4-30c5-4b1c-ac07-b2938a9332c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.039980] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698094, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.054797] env[63297]: DEBUG oslo_vmware.api [None req-68538d92-f457-4726-a199-d113c2019138 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698097, 'name': SuspendVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.166507] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698095, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.374202] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42bd138-866d-4438-9ec0-5e20f6e7130b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.397176] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance 'b1ed5d76-d358-49d3-a854-8f968bc987ad' progress to 0 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1653.538615] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698094, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.553994] env[63297]: DEBUG oslo_vmware.api [None req-68538d92-f457-4726-a199-d113c2019138 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698097, 'name': SuspendVM_Task} progress is 54%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.662111] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698095, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.785670] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.345s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.904689] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1653.906221] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b4f28d9-fe20-4349-9da8-8f8e9c62830e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.917491] env[63297]: DEBUG oslo_vmware.api [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1653.917491] env[63297]: value = "task-1698098" [ 1653.917491] env[63297]: _type = "Task" [ 1653.917491] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.926921] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1653.927153] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance 'b1ed5d76-d358-49d3-a854-8f968bc987ad' progress to 17 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1654.034508] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698094, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.055903] env[63297]: DEBUG oslo_vmware.api [None req-68538d92-f457-4726-a199-d113c2019138 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698097, 'name': SuspendVM_Task, 'duration_secs': 1.044786} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.056224] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-68538d92-f457-4726-a199-d113c2019138 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Suspended the VM {{(pid=63297) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1654.056414] env[63297]: DEBUG nova.compute.manager [None req-68538d92-f457-4726-a199-d113c2019138 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1654.057332] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0513db-7d16-4fe0-b536-411e9dc1b45c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.162348] env[63297]: DEBUG oslo_vmware.api [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698095, 'name': PowerOnVM_Task, 'duration_secs': 1.67703} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.164344] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1654.164439] env[63297]: INFO nova.compute.manager [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Took 9.37 seconds to spawn the instance on the hypervisor. 
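[Editor's note] The records above repeatedly show the same pattern: nova's vmwareapi driver invokes an asynchronous vCenter task (SuspendVM_Task, PowerOnVM_Task, ReconfigVM_Task), receives a task reference such as "task-1698097", and then oslo_vmware's wait_for_task/_poll_task loop reports "progress is N%" until "completed successfully". The snippet below is a minimal illustrative sketch of that invoke-then-poll pattern using oslo.vmware's public VMwareAPISession interface; it is not the driver's actual code, and the host, credentials and the 'vm-12345' managed-object id are placeholders.

    # Sketch only: invoke an async vCenter task and poll it to completion,
    # mirroring the "Invoking ... _Task" / "_poll_task ... progress is N%"
    # records in this log. Placeholder endpoint and credentials.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed object reference for the VM (placeholder moref id).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start the asynchronous task; vCenter returns a Task moref immediately.
    task = session.invoke_api(session.vim, 'SuspendVM_Task', vm_ref)

    # wait_for_task() polls the task status (the "_poll_task" lines above)
    # and returns once vCenter reports the task complete, or raises on error.
    session.wait_for_task(task)

The "duration_secs" values recorded when a task completes (e.g. SuspendVM_Task taking ~1.04s above) are measured over exactly this polling loop.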
[ 1654.164603] env[63297]: DEBUG nova.compute.manager [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1654.165449] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8099fffc-ce72-49e8-acb4-c0e0bdee1718 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.373665] env[63297]: INFO nova.scheduler.client.report [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted allocation for migration 4c165943-95ae-4824-8da7-8d8dcf153e5d [ 1654.434306] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1654.434513] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1654.434669] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1654.434853] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1654.435011] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1654.435175] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1654.435385] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 
tempest-ServerActionsTestOtherB-717361991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1654.435544] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1654.435710] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1654.435872] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1654.436061] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1654.441495] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17736769-0d82-4ae5-85e4-8d535936c547 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.462101] env[63297]: DEBUG oslo_vmware.api [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1654.462101] env[63297]: value = "task-1698099" [ 1654.462101] env[63297]: _type = "Task" [ 1654.462101] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.470830] env[63297]: DEBUG oslo_vmware.api [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698099, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.541692] env[63297]: DEBUG oslo_vmware.api [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698094, 'name': PowerOnVM_Task, 'duration_secs': 2.357351} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.542111] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1654.542264] env[63297]: DEBUG nova.compute.manager [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1654.543126] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9651179-ee1f-46e0-882e-110ab34f4a3e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.690176] env[63297]: INFO nova.compute.manager [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Took 14.55 seconds to build instance. [ 1654.844506] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.847156] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.847156] env[63297]: INFO nova.compute.manager [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Shelving [ 1654.881774] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1504b3b8-23a2-4740-833e-22462711007e tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a6d86e78-ae24-4e70-9fb2-270177b40322" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.603s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.888724] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.889025] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 
tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.974426] env[63297]: DEBUG oslo_vmware.api [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698099, 'name': ReconfigVM_Task, 'duration_secs': 0.152182} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.975173] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance 'b1ed5d76-d358-49d3-a854-8f968bc987ad' progress to 33 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1655.011304] env[63297]: DEBUG nova.compute.manager [req-90c89568-b1da-4db5-8728-2159f1c87621 req-b8345330-c531-4c4b-ad30-0b354137b807 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Received event network-changed-8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1655.011548] env[63297]: DEBUG nova.compute.manager [req-90c89568-b1da-4db5-8728-2159f1c87621 req-b8345330-c531-4c4b-ad30-0b354137b807 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Refreshing instance network info cache due to event network-changed-8f272d86-3373-42d6-8f0d-94e83e8e6b2c. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1655.011730] env[63297]: DEBUG oslo_concurrency.lockutils [req-90c89568-b1da-4db5-8728-2159f1c87621 req-b8345330-c531-4c4b-ad30-0b354137b807 service nova] Acquiring lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1655.011873] env[63297]: DEBUG oslo_concurrency.lockutils [req-90c89568-b1da-4db5-8728-2159f1c87621 req-b8345330-c531-4c4b-ad30-0b354137b807 service nova] Acquired lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1655.014369] env[63297]: DEBUG nova.network.neutron [req-90c89568-b1da-4db5-8728-2159f1c87621 req-b8345330-c531-4c4b-ad30-0b354137b807 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Refreshing network info cache for port 8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1655.064474] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.064774] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.064952] env[63297]: DEBUG nova.objects.instance [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63297) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1655.193684] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2857426e-6cb4-45aa-81a4-dc98c5779437 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "c89d23b7-deb0-4394-9a42-2ac3990da98d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.063s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.352529] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1655.352801] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df4b55ba-1381-46ab-a25b-411a087a8c73 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.360330] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc 
tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1655.360330] env[63297]: value = "task-1698100" [ 1655.360330] env[63297]: _type = "Task" [ 1655.360330] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.368620] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698100, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.394764] env[63297]: DEBUG nova.compute.manager [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1655.483662] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1655.483662] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1655.483662] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1655.483662] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1655.483662] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1655.483992] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1655.484165] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1655.484801] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1655.484801] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1655.484801] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1655.485031] env[63297]: DEBUG nova.virt.hardware [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1655.491727] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Reconfiguring VM instance instance-0000003b to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1655.492110] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6dead83f-ae74-4ae1-ae39-0a5d3bce83c3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.516908] env[63297]: DEBUG oslo_vmware.api [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1655.516908] env[63297]: value = "task-1698101" [ 1655.516908] env[63297]: _type = "Task" [ 1655.516908] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.526942] env[63297]: DEBUG oslo_vmware.api [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698101, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.586853] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "c2362520-ed07-4124-aade-bb54830b0d54" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.587145] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "c2362520-ed07-4124-aade-bb54830b0d54" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.587332] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "c2362520-ed07-4124-aade-bb54830b0d54-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.587547] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "c2362520-ed07-4124-aade-bb54830b0d54-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.587720] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "c2362520-ed07-4124-aade-bb54830b0d54-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.593181] env[63297]: INFO nova.compute.manager [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Terminating instance [ 1655.595834] env[63297]: DEBUG nova.compute.manager [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1655.596049] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1655.596979] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabc1fa1-932d-46b9-8525-1e284967b1cd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.615520] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1655.618085] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cbe78bdb-a70d-47a8-9ce3-1c7b07ac5620 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.731023] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1655.731408] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1655.731646] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleting the datastore file [datastore1] c2362520-ed07-4124-aade-bb54830b0d54 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1655.731931] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61fbfeb3-6ca6-46cf-84ee-d494aa3fa634 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.739413] env[63297]: DEBUG oslo_vmware.api [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1655.739413] env[63297]: value = "task-1698103" [ 1655.739413] env[63297]: _type = "Task" [ 1655.739413] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.748191] env[63297]: DEBUG oslo_vmware.api [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698103, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.895719] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698100, 'name': PowerOffVM_Task, 'duration_secs': 0.41859} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.896621] env[63297]: DEBUG nova.network.neutron [req-90c89568-b1da-4db5-8728-2159f1c87621 req-b8345330-c531-4c4b-ad30-0b354137b807 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updated VIF entry in instance network info cache for port 8f272d86-3373-42d6-8f0d-94e83e8e6b2c. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1655.897031] env[63297]: DEBUG nova.network.neutron [req-90c89568-b1da-4db5-8728-2159f1c87621 req-b8345330-c531-4c4b-ad30-0b354137b807 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updating instance_info_cache with network_info: [{"id": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "address": "fa:16:3e:6a:dc:37", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f272d86-33", "ovs_interfaceid": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1655.898172] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1655.900704] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c7a293-8dc9-4d81-b9b9-7d895beede77 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.927457] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4295adfa-f74e-460d-8118-f34b42972f74 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.932648] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 
tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.026339] env[63297]: DEBUG oslo_vmware.api [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698101, 'name': ReconfigVM_Task, 'duration_secs': 0.196379} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.026644] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Reconfigured VM instance instance-0000003b to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1656.027428] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d900055b-09f9-4327-93e9-d11e4e57bd32 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.049187] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] b1ed5d76-d358-49d3-a854-8f968bc987ad/b1ed5d76-d358-49d3-a854-8f968bc987ad.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1656.050605] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-437b8536-016d-4659-b7f7-f9d33a688244 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.066152] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "3ab25962-2150-4331-a018-aa61bd082814" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.066388] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "3ab25962-2150-4331-a018-aa61bd082814" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.073552] env[63297]: DEBUG oslo_vmware.api [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1656.073552] env[63297]: value = "task-1698104" [ 1656.073552] env[63297]: _type = "Task" [ 1656.073552] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.079672] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17ea7bcb-b7b4-4135-9c8c-281f8abe665b tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.080622] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.149s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.082085] env[63297]: INFO nova.compute.claims [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1656.091832] env[63297]: DEBUG oslo_vmware.api [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698104, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.109581] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "c89d23b7-deb0-4394-9a42-2ac3990da98d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.109848] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "c89d23b7-deb0-4394-9a42-2ac3990da98d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.110125] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "c89d23b7-deb0-4394-9a42-2ac3990da98d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.110451] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "c89d23b7-deb0-4394-9a42-2ac3990da98d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.110545] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aca660cd-8228-41b0-941f-f89e83382415 
tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "c89d23b7-deb0-4394-9a42-2ac3990da98d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.112795] env[63297]: INFO nova.compute.manager [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Terminating instance [ 1656.115082] env[63297]: DEBUG nova.compute.manager [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1656.115202] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1656.116039] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c21026-17e2-49f2-91a5-f680522977a1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.123568] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1656.124376] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49a66c14-4d9f-4827-b0f1-4a9f65dfbad5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.135745] env[63297]: DEBUG oslo_vmware.api [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1656.135745] env[63297]: value = "task-1698105" [ 1656.135745] env[63297]: _type = "Task" [ 1656.135745] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.149571] env[63297]: DEBUG oslo_vmware.api [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698105, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.250650] env[63297]: DEBUG oslo_vmware.api [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698103, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.310601] env[63297]: DEBUG oslo_concurrency.lockutils [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquiring lock "d70bfe65-5faa-4248-9119-9a38259cb418" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.310995] env[63297]: DEBUG oslo_concurrency.lockutils [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lock "d70bfe65-5faa-4248-9119-9a38259cb418" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.311295] env[63297]: DEBUG oslo_concurrency.lockutils [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquiring lock "d70bfe65-5faa-4248-9119-9a38259cb418-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.311524] env[63297]: DEBUG oslo_concurrency.lockutils [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lock "d70bfe65-5faa-4248-9119-9a38259cb418-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.311781] env[63297]: DEBUG oslo_concurrency.lockutils [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lock "d70bfe65-5faa-4248-9119-9a38259cb418-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.314874] env[63297]: INFO nova.compute.manager [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Terminating instance [ 1656.316876] env[63297]: DEBUG oslo_concurrency.lockutils [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquiring lock "refresh_cache-d70bfe65-5faa-4248-9119-9a38259cb418" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1656.317104] env[63297]: DEBUG oslo_concurrency.lockutils [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquired lock "refresh_cache-d70bfe65-5faa-4248-9119-9a38259cb418" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1656.317326] env[63297]: DEBUG nova.network.neutron [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 
tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1656.400275] env[63297]: DEBUG oslo_concurrency.lockutils [req-90c89568-b1da-4db5-8728-2159f1c87621 req-b8345330-c531-4c4b-ad30-0b354137b807 service nova] Releasing lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1656.444038] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1656.444038] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c9c111d4-e182-4dc8-8473-67490c87b54c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.451396] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1656.451396] env[63297]: value = "task-1698106" [ 1656.451396] env[63297]: _type = "Task" [ 1656.451396] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.461179] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698106, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.569364] env[63297]: DEBUG nova.compute.manager [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1656.585010] env[63297]: DEBUG oslo_vmware.api [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698104, 'name': ReconfigVM_Task, 'duration_secs': 0.4492} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.587019] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Reconfigured VM instance instance-0000003b to attach disk [datastore1] b1ed5d76-d358-49d3-a854-8f968bc987ad/b1ed5d76-d358-49d3-a854-8f968bc987ad.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1656.587019] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance 'b1ed5d76-d358-49d3-a854-8f968bc987ad' progress to 50 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1656.646393] env[63297]: DEBUG oslo_vmware.api [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698105, 'name': PowerOffVM_Task, 'duration_secs': 0.285499} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.646393] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1656.646393] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1656.646393] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81c2713f-b9bd-4d72-8fec-bf4c4ec8b0be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.736543] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1656.736763] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1656.736952] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleting the datastore file [datastore1] c89d23b7-deb0-4394-9a42-2ac3990da98d {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1656.737234] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d204af2b-90fc-4c5f-8c26-e497f28e4910 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.746319] env[63297]: DEBUG oslo_vmware.api [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1656.746319] env[63297]: value = "task-1698108" [ 1656.746319] env[63297]: _type = "Task" [ 1656.746319] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.752742] env[63297]: DEBUG oslo_vmware.api [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.669805} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.753348] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1656.753566] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1656.753754] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1656.753929] env[63297]: INFO nova.compute.manager [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1656.754201] env[63297]: DEBUG oslo.service.loopingcall [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1656.754412] env[63297]: DEBUG nova.compute.manager [-] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1656.754511] env[63297]: DEBUG nova.network.neutron [-] [instance: c2362520-ed07-4124-aade-bb54830b0d54] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1656.762041] env[63297]: DEBUG oslo_vmware.api [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698108, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.799728] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "a6d86e78-ae24-4e70-9fb2-270177b40322" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.800016] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a6d86e78-ae24-4e70-9fb2-270177b40322" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.800239] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "a6d86e78-ae24-4e70-9fb2-270177b40322-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.800462] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a6d86e78-ae24-4e70-9fb2-270177b40322-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.800649] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a6d86e78-ae24-4e70-9fb2-270177b40322-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.802734] env[63297]: INFO nova.compute.manager [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Terminating instance [ 1656.804577] env[63297]: DEBUG nova.compute.manager [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1656.804772] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1656.805606] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50060f16-f0b6-47d2-942e-97d1b540731d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.813420] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1656.813686] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bbbfb0a6-f757-4f07-99ff-b1aa7c1ebad0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.821665] env[63297]: DEBUG oslo_vmware.api [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1656.821665] env[63297]: value = "task-1698109" [ 1656.821665] env[63297]: _type = "Task" [ 1656.821665] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.830460] env[63297]: DEBUG oslo_vmware.api [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698109, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.840783] env[63297]: DEBUG nova.network.neutron [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1656.914918] env[63297]: DEBUG nova.network.neutron [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.969107] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698106, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.035263] env[63297]: DEBUG nova.compute.manager [req-433dd9be-959e-49c6-b0ee-71e62875d0c8 req-eaee5a41-ba19-4f62-9803-3fd5545c65c5 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Received event network-changed-8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1657.035349] env[63297]: DEBUG nova.compute.manager [req-433dd9be-959e-49c6-b0ee-71e62875d0c8 req-eaee5a41-ba19-4f62-9803-3fd5545c65c5 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Refreshing instance network info cache due to event network-changed-8f272d86-3373-42d6-8f0d-94e83e8e6b2c. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1657.035766] env[63297]: DEBUG oslo_concurrency.lockutils [req-433dd9be-959e-49c6-b0ee-71e62875d0c8 req-eaee5a41-ba19-4f62-9803-3fd5545c65c5 service nova] Acquiring lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.035766] env[63297]: DEBUG oslo_concurrency.lockutils [req-433dd9be-959e-49c6-b0ee-71e62875d0c8 req-eaee5a41-ba19-4f62-9803-3fd5545c65c5 service nova] Acquired lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.035956] env[63297]: DEBUG nova.network.neutron [req-433dd9be-959e-49c6-b0ee-71e62875d0c8 req-eaee5a41-ba19-4f62-9803-3fd5545c65c5 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Refreshing network info cache for port 8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1657.055697] env[63297]: DEBUG nova.compute.manager [req-f3d2a70f-24e8-453e-be13-099008c9f615 req-c161df2a-3b6b-44a5-b075-5fc6117f04a9 service nova] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Received event network-vif-deleted-08bfccea-a6c3-4e93-b78f-ae706f8a8469 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1657.055901] env[63297]: INFO nova.compute.manager [req-f3d2a70f-24e8-453e-be13-099008c9f615 req-c161df2a-3b6b-44a5-b075-5fc6117f04a9 service nova] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Neutron deleted interface 08bfccea-a6c3-4e93-b78f-ae706f8a8469; detaching it from the instance and deleting it from the info cache [ 1657.056083] env[63297]: DEBUG nova.network.neutron [req-f3d2a70f-24e8-453e-be13-099008c9f615 req-c161df2a-3b6b-44a5-b075-5fc6117f04a9 service nova] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1657.092076] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.096762] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9ef492-0055-4476-87c8-ebf3afe80f77 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.117824] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a6b3dc-f103-4179-a247-eb91d121bf66 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.135630] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance 'b1ed5d76-d358-49d3-a854-8f968bc987ad' progress to 67 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1657.258680] env[63297]: DEBUG oslo_vmware.api [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698108, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.300816} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.260152] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1657.260347] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1657.260517] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1657.260679] env[63297]: INFO nova.compute.manager [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1657.260904] env[63297]: DEBUG oslo.service.loopingcall [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1657.265371] env[63297]: DEBUG nova.compute.manager [-] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1657.265482] env[63297]: DEBUG nova.network.neutron [-] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1657.267122] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "d7dc0672-c908-418e-bfcb-8daa761fba37" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.267338] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "d7dc0672-c908-418e-bfcb-8daa761fba37" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.331549] env[63297]: DEBUG oslo_vmware.api [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698109, 'name': PowerOffVM_Task, 'duration_secs': 0.248117} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.334324] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1657.334512] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1657.334931] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-68d59f55-8b8e-4b5d-83d0-b4533f2d74d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.356436] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba268faf-0e73-4150-a5c6-f6b3089e6e72 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.363187] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d399cc4-3f96-4e57-bf1e-f2e5e7388ba8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.393761] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf302475-7c4a-4c75-af9d-01520a70365a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.401847] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6549f1-5966-46dc-b5da-5f8390761eca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.416256] env[63297]: DEBUG nova.compute.provider_tree [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1657.417863] env[63297]: DEBUG oslo_concurrency.lockutils [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Releasing lock "refresh_cache-d70bfe65-5faa-4248-9119-9a38259cb418" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.418238] env[63297]: DEBUG nova.compute.manager [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Start destroying 
the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1657.418431] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1657.419222] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872ff713-4a6c-4fd6-b04e-74aa352e9362 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.423124] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1657.423321] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1657.423494] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleting the datastore file [datastore1] a6d86e78-ae24-4e70-9fb2-270177b40322 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1657.424066] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f0db380-67a7-4184-bf42-c501ca0d26e7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.427534] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1657.428067] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51c1003a-148b-4b8c-a058-5a594517242b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.432131] env[63297]: DEBUG oslo_vmware.api [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1657.432131] env[63297]: value = "task-1698111" [ 1657.432131] env[63297]: _type = "Task" [ 1657.432131] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.436396] env[63297]: DEBUG oslo_vmware.api [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1657.436396] env[63297]: value = "task-1698112" [ 1657.436396] env[63297]: _type = "Task" [ 1657.436396] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.445086] env[63297]: DEBUG oslo_vmware.api [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698111, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.449465] env[63297]: DEBUG oslo_vmware.api [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698112, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.460447] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698106, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.530487] env[63297]: DEBUG nova.network.neutron [-] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1657.559436] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-38e591af-9a9e-4274-a47a-135cdb74d9f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.568294] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14579a0d-6586-4b8b-be19-ca27929a7809 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.601977] env[63297]: DEBUG nova.compute.manager [req-f3d2a70f-24e8-453e-be13-099008c9f615 req-c161df2a-3b6b-44a5-b075-5fc6117f04a9 service nova] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Detach interface failed, port_id=08bfccea-a6c3-4e93-b78f-ae706f8a8469, reason: Instance c2362520-ed07-4124-aade-bb54830b0d54 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1657.674112] env[63297]: DEBUG nova.network.neutron [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Port c8c6ab49-f882-4349-bddd-cfb1a972afc0 binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1657.769557] env[63297]: DEBUG nova.compute.manager [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1657.784042] env[63297]: DEBUG nova.network.neutron [req-433dd9be-959e-49c6-b0ee-71e62875d0c8 req-eaee5a41-ba19-4f62-9803-3fd5545c65c5 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updated VIF entry in instance network info cache for port 8f272d86-3373-42d6-8f0d-94e83e8e6b2c. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1657.784395] env[63297]: DEBUG nova.network.neutron [req-433dd9be-959e-49c6-b0ee-71e62875d0c8 req-eaee5a41-ba19-4f62-9803-3fd5545c65c5 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updating instance_info_cache with network_info: [{"id": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "address": "fa:16:3e:6a:dc:37", "network": {"id": "6931e431-61d6-4a80-a895-4d46ae814982", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1186930984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54456153a472421890c889a6f2c62b38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f272d86-33", "ovs_interfaceid": "8f272d86-3373-42d6-8f0d-94e83e8e6b2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1657.944494] env[63297]: ERROR nova.scheduler.client.report [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [req-d0ad94ae-4bfa-4c37-abe1-207ede79f24c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 
88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d0ad94ae-4bfa-4c37-abe1-207ede79f24c"}]} [ 1657.944906] env[63297]: DEBUG oslo_vmware.api [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231409} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.947535] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1657.948054] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1657.948054] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1657.948135] env[63297]: INFO nova.compute.manager [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1657.948431] env[63297]: DEBUG oslo.service.loopingcall [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1657.948573] env[63297]: DEBUG nova.compute.manager [-] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1657.948675] env[63297]: DEBUG nova.network.neutron [-] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1657.954043] env[63297]: DEBUG oslo_vmware.api [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698112, 'name': PowerOffVM_Task, 'duration_secs': 0.125773} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.957410] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1657.957587] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1657.957870] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7bd571dc-0b58-4277-879c-3921e22a2ebb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.961690] env[63297]: DEBUG nova.scheduler.client.report [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1657.967370] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698106, 'name': CreateSnapshot_Task, 'duration_secs': 1.07857} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.967832] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1657.968425] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68960db5-1fba-4deb-a6ef-4d86028d99fa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.972088] env[63297]: DEBUG nova.network.neutron [-] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1657.981632] env[63297]: DEBUG nova.scheduler.client.report [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 
1657.981893] env[63297]: DEBUG nova.compute.provider_tree [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 179, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1657.985883] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1657.986127] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1657.986317] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Deleting the datastore file [datastore1] d70bfe65-5faa-4248-9119-9a38259cb418 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1657.986573] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a841fcff-4b03-4940-bd13-21d7df8d65c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.994797] env[63297]: DEBUG oslo_vmware.api [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for the task: (returnval){ [ 1657.994797] env[63297]: value = "task-1698114" [ 1657.994797] env[63297]: _type = "Task" [ 1657.994797] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.998415] env[63297]: DEBUG nova.scheduler.client.report [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1658.005177] env[63297]: DEBUG oslo_vmware.api [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698114, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.016665] env[63297]: DEBUG nova.scheduler.client.report [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1658.033922] env[63297]: INFO nova.compute.manager [-] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Took 1.28 seconds to deallocate network for instance. [ 1658.263469] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d731770-331a-4364-9f84-1f6fbd4ac1c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.271119] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f660201-f9c1-4686-956e-efffef34cbcf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.302802] env[63297]: DEBUG oslo_concurrency.lockutils [req-433dd9be-959e-49c6-b0ee-71e62875d0c8 req-eaee5a41-ba19-4f62-9803-3fd5545c65c5 service nova] Releasing lock "refresh_cache-1d8c6df5-069f-4647-a2f6-e69a4bf8be94" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1658.304444] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.305208] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9aa0833-1c91-4bab-93c7-93dfc5b60fdd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.313493] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb293ba-c21a-4274-9cc4-3bf52a162227 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.326616] env[63297]: DEBUG nova.compute.provider_tree [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1658.474968] env[63297]: INFO nova.compute.manager [-] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Took 1.21 seconds to deallocate network for instance. 
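The VirtualMachine/FileManager task records above all follow oslo.vmware's invoke-then-poll pattern: the driver starts an asynchronous vCenter task ("Invoking ... _Task with opID=...") and then blocks while the session polls it (the "_poll_task ... progress is N%" and "duration_secs" entries). A minimal sketch of that pattern is shown below; the vCenter host, credentials and managed object reference are placeholders for illustration, not values taken from this log.

# Illustrative sketch only: invoke an asynchronous vCenter task and poll it
# the way the records above do. Host, credentials and the moref are made up.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'admin', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed object reference for a VM (the real driver resolves this
# from the instance UUID via a property collector query).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Start the asynchronous task, then wait_for_task() polls it until it
# reaches 100% / 'success'; on failure it raises an oslo_vmware exception.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the task completes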
[ 1658.487323] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1658.487608] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d35e0c7c-3e3a-4a5d-920c-c783d1485f48 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.496851] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1658.496851] env[63297]: value = "task-1698115" [ 1658.496851] env[63297]: _type = "Task" [ 1658.496851] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.509550] env[63297]: DEBUG oslo_vmware.api [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Task: {'id': task-1698114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.364549} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.512701] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1658.512900] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1658.513085] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1658.513283] env[63297]: INFO nova.compute.manager [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1658.513488] env[63297]: DEBUG oslo.service.loopingcall [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1658.513689] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698115, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.513887] env[63297]: DEBUG nova.compute.manager [-] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1658.513981] env[63297]: DEBUG nova.network.neutron [-] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1658.530573] env[63297]: DEBUG nova.network.neutron [-] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1658.539725] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.695472] env[63297]: DEBUG oslo_concurrency.lockutils [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "b1ed5d76-d358-49d3-a854-8f968bc987ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.695808] env[63297]: DEBUG oslo_concurrency.lockutils [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.695855] env[63297]: DEBUG oslo_concurrency.lockutils [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.730916] env[63297]: DEBUG nova.network.neutron [-] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.861075] env[63297]: DEBUG nova.scheduler.client.report [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 140 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1658.861382] env[63297]: DEBUG nova.compute.provider_tree [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 140 to 141 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1658.861653] env[63297]: DEBUG nova.compute.provider_tree [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1658.981261] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.010277] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698115, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.032835] env[63297]: DEBUG nova.network.neutron [-] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.085173] env[63297]: DEBUG nova.compute.manager [req-d29b4fd4-74b8-4257-a965-cc0d203f0b2a req-f467a842-c22f-4197-972b-bf18c5c0499d service nova] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Received event network-vif-deleted-86ae06d7-2df8-4f63-9929-672841dfcec2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1659.085341] env[63297]: DEBUG nova.compute.manager [req-d29b4fd4-74b8-4257-a965-cc0d203f0b2a req-f467a842-c22f-4197-972b-bf18c5c0499d service nova] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Received event network-vif-deleted-e0aae0a1-c525-4233-b876-799c11006f75 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1659.233043] env[63297]: INFO nova.compute.manager [-] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Took 1.28 seconds to deallocate network for instance. 
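The ERROR record above (HTTP 409, code "placement.concurrent_update") followed by "Refreshing inventories ..." and "Updated inventory ... generation from 140 to 141" shows Placement's optimistic-concurrency scheme: inventory is PUT together with the resource provider generation it was read at, and a 409 means another writer bumped the generation first, so the client re-reads and retries. A rough sketch of that read-modify-write loop against the Placement HTTP API follows; the endpoint, token and retry policy are placeholders, not Nova's actual report client.

# Rough sketch of the retry loop implied by the 409 above; endpoint and
# token are placeholders. The provider UUID is the one from these records.
import requests

PLACEMENT = 'http://placement.example.test'
HEADERS = {'X-Auth-Token': 'TOKEN',
           'OpenStack-API-Version': 'placement 1.26'}
RP_UUID = '88960333-a089-4255-ad72-5c02d57b2b35'

def set_inventory(inventories, retries=3):
    url = f'{PLACEMENT}/resource_providers/{RP_UUID}/inventories'
    for _ in range(retries):
        # Read the current view to learn the provider generation.
        current = requests.get(url, headers=HEADERS).json()
        payload = {
            'resource_provider_generation':
                current['resource_provider_generation'],
            'inventories': inventories,
        }
        resp = requests.put(url, json=payload, headers=HEADERS)
        if resp.status_code != 409:
            return resp.json()
        # 409 "placement.concurrent_update": another writer changed the
        # provider since our GET; loop and retry with the new generation.
    raise RuntimeError('placement generation conflict persisted')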
[ 1659.367820] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.287s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.368372] env[63297]: DEBUG nova.compute.manager [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1659.371090] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.279s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.372498] env[63297]: INFO nova.compute.claims [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1659.511121] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698115, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.535856] env[63297]: INFO nova.compute.manager [-] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Took 1.02 seconds to deallocate network for instance. 
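The Acquiring/Acquired/Released lock records throughout this section, such as "refresh_cache-<uuid>" and the "compute_resources" lock acquired above after waiting 2.279s, come from oslo.concurrency's named locks, which log wait and hold times around the critical section. A small sketch of the two usual forms follows; the lock names and empty bodies are illustrative only.

# Small sketch of oslo.concurrency named locks as seen in these records;
# lock names and function bodies are illustrative.
from oslo_concurrency import lockutils

# Context-manager form, like the per-instance "refresh_cache-<uuid>" locks.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache under the lock

# Decorator form, like the resource tracker's "compute_resources" lock.
@lockutils.synchronized('compute_resources')
def instance_claim():
    pass  # claim CPU/RAM/disk while holding the lock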
[ 1659.731260] env[63297]: DEBUG oslo_concurrency.lockutils [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.731521] env[63297]: DEBUG oslo_concurrency.lockutils [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.731680] env[63297]: DEBUG nova.network.neutron [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1659.738841] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.876960] env[63297]: DEBUG nova.compute.utils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1659.881875] env[63297]: DEBUG nova.compute.manager [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1659.882052] env[63297]: DEBUG nova.network.neutron [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1659.923025] env[63297]: DEBUG nova.policy [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2d8413d4aad4ed8a1fa9e436de117ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc81b0f87c64b2283eb0ece21fb31a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1660.012207] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698115, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.042371] env[63297]: DEBUG oslo_concurrency.lockutils [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.225611] env[63297]: DEBUG nova.network.neutron [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Successfully created port: 8b0d7883-16da-4bdb-b728-dbcd6772ccdb {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1660.383066] env[63297]: DEBUG nova.compute.manager [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1660.514758] env[63297]: DEBUG nova.network.neutron [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance_info_cache with network_info: [{"id": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "address": "fa:16:3e:23:ee:4c", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c6ab49-f8", "ovs_interfaceid": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1660.519492] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698115, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.647775] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69517882-b17c-4a01-b66a-d2160f6d3464 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.655338] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b044c673-5c2c-4001-97be-e97e0a9a1536 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.684925] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6edc76-dbd6-4cd1-aec4-549133c25b4d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.692052] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e302819-7abb-40c8-9aa1-8cceb1e50e60 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.704610] env[63297]: DEBUG nova.compute.provider_tree [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1661.013020] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698115, 'name': CloneVM_Task, 'duration_secs': 2.03978} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.013317] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Created linked-clone VM from snapshot [ 1661.014168] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86932e4-af0a-4d80-b8c3-c198677ffba7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.021858] env[63297]: DEBUG oslo_concurrency.lockutils [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.025529] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Uploading image 6d9483fb-3122-4c2b-800b-dca528822bb1 {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1661.054161] env[63297]: DEBUG oslo_vmware.rw_handles [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1661.054161] env[63297]: value = "vm-353998" [ 1661.054161] env[63297]: _type = "VirtualMachine" [ 1661.054161] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1661.054453] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-083fc915-af04-4783-9f3e-9cc46ae06828 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.062072] env[63297]: DEBUG oslo_vmware.rw_handles [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lease: (returnval){ [ 1661.062072] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c86fd9-7e6b-e8dc-0cd5-b89038c5d443" [ 1661.062072] env[63297]: _type = "HttpNfcLease" [ 1661.062072] env[63297]: } obtained for exporting VM: (result){ [ 1661.062072] env[63297]: value = "vm-353998" [ 1661.062072] env[63297]: _type = "VirtualMachine" [ 1661.062072] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1661.062310] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the lease: (returnval){ [ 1661.062310] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c86fd9-7e6b-e8dc-0cd5-b89038c5d443" [ 1661.062310] env[63297]: _type = "HttpNfcLease" [ 1661.062310] env[63297]: } to be ready. 
{{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1661.069260] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1661.069260] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c86fd9-7e6b-e8dc-0cd5-b89038c5d443" [ 1661.069260] env[63297]: _type = "HttpNfcLease" [ 1661.069260] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1661.208074] env[63297]: DEBUG nova.scheduler.client.report [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1661.397519] env[63297]: DEBUG nova.compute.manager [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1661.419240] env[63297]: DEBUG nova.virt.hardware [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1661.419492] env[63297]: DEBUG nova.virt.hardware [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1661.419648] env[63297]: DEBUG nova.virt.hardware [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1661.419827] env[63297]: DEBUG nova.virt.hardware [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor pref 0:0:0 
{{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1661.419972] env[63297]: DEBUG nova.virt.hardware [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1661.420131] env[63297]: DEBUG nova.virt.hardware [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1661.420333] env[63297]: DEBUG nova.virt.hardware [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1661.420493] env[63297]: DEBUG nova.virt.hardware [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1661.420700] env[63297]: DEBUG nova.virt.hardware [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1661.420867] env[63297]: DEBUG nova.virt.hardware [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1661.421058] env[63297]: DEBUG nova.virt.hardware [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1661.421937] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95858eb-70a4-40b7-b330-56cd6f46cd6f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.429902] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764ce047-03d4-4dc0-b88c-5d08328dc7bc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.545854] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc23bba9-3971-4fb2-a7be-11d6cbbeb0d9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.568468] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54a9649-6454-4d07-8c73-f7c6b3076078 {{(pid=63297) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.574793] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1661.574793] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c86fd9-7e6b-e8dc-0cd5-b89038c5d443" [ 1661.574793] env[63297]: _type = "HttpNfcLease" [ 1661.574793] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1661.577102] env[63297]: DEBUG oslo_vmware.rw_handles [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1661.577102] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c86fd9-7e6b-e8dc-0cd5-b89038c5d443" [ 1661.577102] env[63297]: _type = "HttpNfcLease" [ 1661.577102] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1661.577474] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance 'b1ed5d76-d358-49d3-a854-8f968bc987ad' progress to 83 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1661.581463] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed15e99f-6986-42ca-bc3b-0b0fc6ce1f8d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.588723] env[63297]: DEBUG oslo_vmware.rw_handles [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a343ac-bf5f-bd20-7b52-c6bc0ae6f03c/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1661.588902] env[63297]: DEBUG oslo_vmware.rw_handles [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a343ac-bf5f-bd20-7b52-c6bc0ae6f03c/disk-0.vmdk for reading. 
{{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1661.652466] env[63297]: DEBUG nova.compute.manager [req-e029f782-2d15-4407-97f1-c58068ae9883 req-5fc7e46f-101f-4429-abb3-cd625df2785d service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Received event network-vif-plugged-8b0d7883-16da-4bdb-b728-dbcd6772ccdb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1661.652719] env[63297]: DEBUG oslo_concurrency.lockutils [req-e029f782-2d15-4407-97f1-c58068ae9883 req-5fc7e46f-101f-4429-abb3-cd625df2785d service nova] Acquiring lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.652928] env[63297]: DEBUG oslo_concurrency.lockutils [req-e029f782-2d15-4407-97f1-c58068ae9883 req-5fc7e46f-101f-4429-abb3-cd625df2785d service nova] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.653111] env[63297]: DEBUG oslo_concurrency.lockutils [req-e029f782-2d15-4407-97f1-c58068ae9883 req-5fc7e46f-101f-4429-abb3-cd625df2785d service nova] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.653282] env[63297]: DEBUG nova.compute.manager [req-e029f782-2d15-4407-97f1-c58068ae9883 req-5fc7e46f-101f-4429-abb3-cd625df2785d service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] No waiting events found dispatching network-vif-plugged-8b0d7883-16da-4bdb-b728-dbcd6772ccdb {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1661.653448] env[63297]: WARNING nova.compute.manager [req-e029f782-2d15-4407-97f1-c58068ae9883 req-5fc7e46f-101f-4429-abb3-cd625df2785d service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Received unexpected event network-vif-plugged-8b0d7883-16da-4bdb-b728-dbcd6772ccdb for instance with vm_state building and task_state spawning. [ 1661.686117] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a1c9a8eb-7c0d-4fbf-98a6-bdf4517636d2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.714630] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.715199] env[63297]: DEBUG nova.compute.manager [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1661.717847] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.413s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.721056] env[63297]: INFO nova.compute.claims [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1661.740022] env[63297]: DEBUG nova.network.neutron [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Successfully updated port: 8b0d7883-16da-4bdb-b728-dbcd6772ccdb {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1662.086707] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-601a08a8-d432-4cc0-9a90-12bbc1ed95ff tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance 'b1ed5d76-d358-49d3-a854-8f968bc987ad' progress to 100 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1662.225281] env[63297]: DEBUG nova.compute.utils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1662.228893] env[63297]: DEBUG nova.compute.manager [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1662.229177] env[63297]: DEBUG nova.network.neutron [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1662.241443] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "refresh_cache-f87867c3-58d4-4bd6-b6ef-1608ebef6b22" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1662.241721] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "refresh_cache-f87867c3-58d4-4bd6-b6ef-1608ebef6b22" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1662.241964] env[63297]: DEBUG nova.network.neutron [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1662.274543] env[63297]: DEBUG nova.policy [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c1a03cca2224878ae9ccbb9e42a1b2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd636a91a492a4f538bc2fc8634f5fa14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1662.535811] env[63297]: DEBUG nova.network.neutron [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Successfully created port: d469619d-b568-437d-8023-8d02e02b7350 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1662.734256] env[63297]: DEBUG nova.compute.manager [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1662.779779] env[63297]: DEBUG nova.network.neutron [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1662.941184] env[63297]: DEBUG nova.network.neutron [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Updating instance_info_cache with network_info: [{"id": "8b0d7883-16da-4bdb-b728-dbcd6772ccdb", "address": "fa:16:3e:b7:04:f1", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b0d7883-16", "ovs_interfaceid": "8b0d7883-16da-4bdb-b728-dbcd6772ccdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.047994] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb45414-2e5c-48cc-a7bd-95ef626d5b00 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.056956] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b5275e-2753-46c4-b0d3-91b2954a8920 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.093327] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca4bade-1d42-4823-8d44-d87b34ac5bbe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.105499] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727a0363-5a72-47f8-aa65-021a2a5bb4f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.120710] env[63297]: DEBUG nova.compute.provider_tree [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1663.444295] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "refresh_cache-f87867c3-58d4-4bd6-b6ef-1608ebef6b22" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.444686] env[63297]: 
DEBUG nova.compute.manager [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Instance network_info: |[{"id": "8b0d7883-16da-4bdb-b728-dbcd6772ccdb", "address": "fa:16:3e:b7:04:f1", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b0d7883-16", "ovs_interfaceid": "8b0d7883-16da-4bdb-b728-dbcd6772ccdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1663.445336] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:04:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fc48e29b-113c-4849-850c-35435eab4052', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b0d7883-16da-4bdb-b728-dbcd6772ccdb', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1663.453137] env[63297]: DEBUG oslo.service.loopingcall [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1663.453580] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1663.453580] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f082d97-c38f-4bd7-a570-05956e6de2c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.475332] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1663.475332] env[63297]: value = "task-1698117" [ 1663.475332] env[63297]: _type = "Task" [ 1663.475332] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.484508] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698117, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.626716] env[63297]: DEBUG nova.scheduler.client.report [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1663.667452] env[63297]: DEBUG nova.compute.manager [req-5410136b-26fc-4799-806b-0f4fa31a7bea req-fa1a4414-8422-4441-926f-8f683a7acf88 service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Received event network-changed-8b0d7883-16da-4bdb-b728-dbcd6772ccdb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1663.667665] env[63297]: DEBUG nova.compute.manager [req-5410136b-26fc-4799-806b-0f4fa31a7bea req-fa1a4414-8422-4441-926f-8f683a7acf88 service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Refreshing instance network info cache due to event network-changed-8b0d7883-16da-4bdb-b728-dbcd6772ccdb. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1663.667885] env[63297]: DEBUG oslo_concurrency.lockutils [req-5410136b-26fc-4799-806b-0f4fa31a7bea req-fa1a4414-8422-4441-926f-8f683a7acf88 service nova] Acquiring lock "refresh_cache-f87867c3-58d4-4bd6-b6ef-1608ebef6b22" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.668231] env[63297]: DEBUG oslo_concurrency.lockutils [req-5410136b-26fc-4799-806b-0f4fa31a7bea req-fa1a4414-8422-4441-926f-8f683a7acf88 service nova] Acquired lock "refresh_cache-f87867c3-58d4-4bd6-b6ef-1608ebef6b22" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.668449] env[63297]: DEBUG nova.network.neutron [req-5410136b-26fc-4799-806b-0f4fa31a7bea req-fa1a4414-8422-4441-926f-8f683a7acf88 service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Refreshing network info cache for port 8b0d7883-16da-4bdb-b728-dbcd6772ccdb {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1663.744108] env[63297]: DEBUG nova.compute.manager [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1663.772221] env[63297]: DEBUG nova.virt.hardware [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1663.772723] env[63297]: DEBUG nova.virt.hardware [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1663.772869] env[63297]: DEBUG nova.virt.hardware [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1663.773168] env[63297]: DEBUG nova.virt.hardware [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1663.773397] env[63297]: DEBUG nova.virt.hardware [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1663.773842] env[63297]: DEBUG nova.virt.hardware [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1663.773908] env[63297]: DEBUG nova.virt.hardware [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1663.774118] env[63297]: DEBUG nova.virt.hardware [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1663.774341] env[63297]: DEBUG nova.virt.hardware [None 
req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1663.774533] env[63297]: DEBUG nova.virt.hardware [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1663.774719] env[63297]: DEBUG nova.virt.hardware [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1663.775732] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f44d81-a5db-4940-8305-582a6da43491 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.785732] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4532744-5419-4b47-a91a-2b744ccbe623 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.985959] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698117, 'name': CreateVM_Task, 'duration_secs': 0.455546} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.986138] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1663.986790] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.987058] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.987285] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1663.987532] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06194aad-d8b1-4a2f-8c32-072b86e0e552 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.992340] env[63297]: DEBUG oslo_vmware.api [None 
req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1663.992340] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526b52bd-c33f-ba15-17fd-c5ee3702f3c8" [ 1663.992340] env[63297]: _type = "Task" [ 1663.992340] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.006108] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526b52bd-c33f-ba15-17fd-c5ee3702f3c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.050302] env[63297]: DEBUG nova.compute.manager [req-766e785f-74bb-44fd-838c-c52008037229 req-a2854c43-636a-460e-89dd-2f76eb7cbdde service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Received event network-vif-plugged-d469619d-b568-437d-8023-8d02e02b7350 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1664.050518] env[63297]: DEBUG oslo_concurrency.lockutils [req-766e785f-74bb-44fd-838c-c52008037229 req-a2854c43-636a-460e-89dd-2f76eb7cbdde service nova] Acquiring lock "3ab25962-2150-4331-a018-aa61bd082814-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.050734] env[63297]: DEBUG oslo_concurrency.lockutils [req-766e785f-74bb-44fd-838c-c52008037229 req-a2854c43-636a-460e-89dd-2f76eb7cbdde service nova] Lock "3ab25962-2150-4331-a018-aa61bd082814-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.050919] env[63297]: DEBUG oslo_concurrency.lockutils [req-766e785f-74bb-44fd-838c-c52008037229 req-a2854c43-636a-460e-89dd-2f76eb7cbdde service nova] Lock "3ab25962-2150-4331-a018-aa61bd082814-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.051623] env[63297]: DEBUG nova.compute.manager [req-766e785f-74bb-44fd-838c-c52008037229 req-a2854c43-636a-460e-89dd-2f76eb7cbdde service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] No waiting events found dispatching network-vif-plugged-d469619d-b568-437d-8023-8d02e02b7350 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1664.051846] env[63297]: WARNING nova.compute.manager [req-766e785f-74bb-44fd-838c-c52008037229 req-a2854c43-636a-460e-89dd-2f76eb7cbdde service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Received unexpected event network-vif-plugged-d469619d-b568-437d-8023-8d02e02b7350 for instance with vm_state building and task_state spawning. 
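The CreateVM_Task and SearchDatastore_Task entries nearby follow oslo.vmware's invoke-then-poll shape: the driver issues a vSphere task through the API session, then blocks in wait_for_task(), which emits the "progress is N%" lines until vCenter reports completion. A hedged sketch of that call shape, with placeholder host, credentials, and object references (this is not Nova's vm_util code):

    # Illustrative sketch of the oslo.vmware invoke/poll pattern reflected in
    # the CreateVM_Task entries above; all values here are placeholders.
    from oslo_vmware import api as vmware_api

    def get_session():
        # Establishes a vCenter API session; host and credentials are
        # placeholders, not the endpoints named in this log.
        return vmware_api.VMwareAPISession(
            "vcenter.example.test", "user", "password",
            api_retry_count=10, task_poll_interval=0.5)

    def create_vm(session, vm_folder_ref, resource_pool_ref, config_spec):
        """Issue Folder.CreateVM_Task and block until vCenter finishes it."""
        task_ref = session.invoke_api(
            session.vim, "CreateVM_Task", vm_folder_ref,
            config=config_spec, pool=resource_pool_ref)
        # wait_for_task() polls the task, logging the "progress is N%" lines,
        # and raises if vCenter reports the task as failed.
        return session.wait_for_task(task_ref)
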
[ 1664.131305] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.131830] env[63297]: DEBUG nova.compute.manager [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1664.134659] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.595s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.135897] env[63297]: DEBUG nova.objects.instance [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lazy-loading 'resources' on Instance uuid c2362520-ed07-4124-aade-bb54830b0d54 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1664.467630] env[63297]: DEBUG nova.network.neutron [req-5410136b-26fc-4799-806b-0f4fa31a7bea req-fa1a4414-8422-4441-926f-8f683a7acf88 service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Updated VIF entry in instance network info cache for port 8b0d7883-16da-4bdb-b728-dbcd6772ccdb. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1664.468154] env[63297]: DEBUG nova.network.neutron [req-5410136b-26fc-4799-806b-0f4fa31a7bea req-fa1a4414-8422-4441-926f-8f683a7acf88 service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Updating instance_info_cache with network_info: [{"id": "8b0d7883-16da-4bdb-b728-dbcd6772ccdb", "address": "fa:16:3e:b7:04:f1", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b0d7883-16", "ovs_interfaceid": "8b0d7883-16da-4bdb-b728-dbcd6772ccdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.504172] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526b52bd-c33f-ba15-17fd-c5ee3702f3c8, 'name': SearchDatastore_Task, 'duration_secs': 0.015481} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.504516] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1664.504774] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1664.505053] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1664.505211] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1664.505468] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1664.505784] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3780ac37-6732-4e37-b14c-47b8e5bae72c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.515818] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1664.515997] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1664.516760] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f07f5bef-aec3-48ba-b1a8-ba676e7836f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.522345] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1664.522345] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528a7444-ecc2-f1f0-0cac-df6d766fbe1b" [ 1664.522345] env[63297]: _type = "Task" [ 1664.522345] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.530931] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528a7444-ecc2-f1f0-0cac-df6d766fbe1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.543219] env[63297]: DEBUG nova.network.neutron [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Successfully updated port: d469619d-b568-437d-8023-8d02e02b7350 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1664.638070] env[63297]: DEBUG nova.compute.utils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1664.642869] env[63297]: DEBUG nova.compute.manager [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1664.643061] env[63297]: DEBUG nova.network.neutron [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1664.683418] env[63297]: DEBUG nova.policy [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f80bce1510594b8a95537f814f68b2bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45448893e7ee4b8d896d1bb3f3a9ecf1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1664.781782] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "b1ed5d76-d358-49d3-a854-8f968bc987ad" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.782076] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.782279] env[63297]: DEBUG nova.compute.manager [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Going to confirm migration 4 {{(pid=63297) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1664.891060] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4508c7f8-82ee-4c71-b291-16f397adecdd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.899015] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122afae7-744f-4fe5-93e6-8b4d78c0a5bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.932257] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e676d418-536a-416c-b8a8-ea9f3a62ad29 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.940318] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d534c60-53dc-4082-bd39-325dc61154a3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1664.954585] env[63297]: DEBUG nova.compute.provider_tree [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1664.972107] env[63297]: DEBUG oslo_concurrency.lockutils [req-5410136b-26fc-4799-806b-0f4fa31a7bea req-fa1a4414-8422-4441-926f-8f683a7acf88 service nova] Releasing lock "refresh_cache-f87867c3-58d4-4bd6-b6ef-1608ebef6b22" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.005120] env[63297]: DEBUG nova.network.neutron [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Successfully created port: 51feb81a-d695-4671-800d-b58470af4ae2 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1665.033910] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528a7444-ecc2-f1f0-0cac-df6d766fbe1b, 'name': SearchDatastore_Task, 'duration_secs': 0.012785} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.034714] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23b63e6c-daac-4755-94c7-6b612893da85 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.040305] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1665.040305] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524c131e-ac36-4a38-7241-cbcfce5eb248" [ 1665.040305] env[63297]: _type = "Task" [ 1665.040305] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.048553] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "refresh_cache-3ab25962-2150-4331-a018-aa61bd082814" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.048695] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquired lock "refresh_cache-3ab25962-2150-4331-a018-aa61bd082814" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.048875] env[63297]: DEBUG nova.network.neutron [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1665.049864] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524c131e-ac36-4a38-7241-cbcfce5eb248, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.143700] env[63297]: DEBUG nova.compute.manager [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1665.331305] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.331515] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.331794] env[63297]: DEBUG nova.network.neutron [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1665.331992] env[63297]: DEBUG nova.objects.instance [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'info_cache' on Instance uuid b1ed5d76-d358-49d3-a854-8f968bc987ad {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1665.457676] env[63297]: DEBUG nova.scheduler.client.report [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1665.555239] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524c131e-ac36-4a38-7241-cbcfce5eb248, 'name': SearchDatastore_Task, 'duration_secs': 0.023638} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.555579] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.555785] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] f87867c3-58d4-4bd6-b6ef-1608ebef6b22/f87867c3-58d4-4bd6-b6ef-1608ebef6b22.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1665.556359] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f42cec5-0c5e-4bcf-9e39-4b9dbc66bb9d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.563553] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1665.563553] env[63297]: value = "task-1698118" [ 1665.563553] env[63297]: _type = "Task" [ 1665.563553] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.571909] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698118, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.595967] env[63297]: DEBUG nova.network.neutron [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1665.695127] env[63297]: DEBUG nova.compute.manager [req-06c33fa6-a81c-42c4-b6f3-b2b04a0d5887 req-12e1865f-9c5d-4af7-9078-5f6fe805911a service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Received event network-changed-d469619d-b568-437d-8023-8d02e02b7350 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1665.695339] env[63297]: DEBUG nova.compute.manager [req-06c33fa6-a81c-42c4-b6f3-b2b04a0d5887 req-12e1865f-9c5d-4af7-9078-5f6fe805911a service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Refreshing instance network info cache due to event network-changed-d469619d-b568-437d-8023-8d02e02b7350. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1665.695528] env[63297]: DEBUG oslo_concurrency.lockutils [req-06c33fa6-a81c-42c4-b6f3-b2b04a0d5887 req-12e1865f-9c5d-4af7-9078-5f6fe805911a service nova] Acquiring lock "refresh_cache-3ab25962-2150-4331-a018-aa61bd082814" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.740553] env[63297]: DEBUG nova.network.neutron [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Updating instance_info_cache with network_info: [{"id": "d469619d-b568-437d-8023-8d02e02b7350", "address": "fa:16:3e:4a:1e:b1", "network": {"id": "b4d7c5a5-3ce8-4260-adec-a4bfc26133b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1616270477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d636a91a492a4f538bc2fc8634f5fa14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd469619d-b5", "ovs_interfaceid": "d469619d-b568-437d-8023-8d02e02b7350", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1665.963183] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.828s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.966014] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.985s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.966419] env[63297]: DEBUG nova.objects.instance [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lazy-loading 'resources' on Instance uuid c89d23b7-deb0-4394-9a42-2ac3990da98d {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1665.985366] env[63297]: INFO nova.scheduler.client.report [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted allocations for instance c2362520-ed07-4124-aade-bb54830b0d54 [ 1666.077155] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 
tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698118, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.152582] env[63297]: DEBUG nova.compute.manager [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1666.180573] env[63297]: DEBUG nova.virt.hardware [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1666.180849] env[63297]: DEBUG nova.virt.hardware [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1666.180987] env[63297]: DEBUG nova.virt.hardware [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1666.181189] env[63297]: DEBUG nova.virt.hardware [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1666.181374] env[63297]: DEBUG nova.virt.hardware [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1666.181529] env[63297]: DEBUG nova.virt.hardware [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1666.181956] env[63297]: DEBUG nova.virt.hardware [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 
tempest-ServerDiskConfigTestJSON-1661101796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1666.182036] env[63297]: DEBUG nova.virt.hardware [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1666.182188] env[63297]: DEBUG nova.virt.hardware [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1666.183584] env[63297]: DEBUG nova.virt.hardware [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1666.183584] env[63297]: DEBUG nova.virt.hardware [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1666.183584] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7e3884-1d2c-4c6b-8dfb-a1da6e344a14 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.192098] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1d4d22-06f1-4aea-a7a6-03d417729f5d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.243666] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Releasing lock "refresh_cache-3ab25962-2150-4331-a018-aa61bd082814" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1666.243666] env[63297]: DEBUG nova.compute.manager [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Instance network_info: |[{"id": "d469619d-b568-437d-8023-8d02e02b7350", "address": "fa:16:3e:4a:1e:b1", "network": {"id": "b4d7c5a5-3ce8-4260-adec-a4bfc26133b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1616270477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d636a91a492a4f538bc2fc8634f5fa14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd469619d-b5", "ovs_interfaceid": "d469619d-b568-437d-8023-8d02e02b7350", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1666.243860] env[63297]: DEBUG oslo_concurrency.lockutils [req-06c33fa6-a81c-42c4-b6f3-b2b04a0d5887 req-12e1865f-9c5d-4af7-9078-5f6fe805911a service nova] Acquired lock "refresh_cache-3ab25962-2150-4331-a018-aa61bd082814" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.243960] env[63297]: DEBUG nova.network.neutron [req-06c33fa6-a81c-42c4-b6f3-b2b04a0d5887 req-12e1865f-9c5d-4af7-9078-5f6fe805911a service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Refreshing network info cache for port d469619d-b568-437d-8023-8d02e02b7350 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1666.245264] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:1e:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd469619d-b568-437d-8023-8d02e02b7350', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1666.253700] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Creating folder: Project (d636a91a492a4f538bc2fc8634f5fa14). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1666.256777] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81fcb60a-b4d9-46b5-9552-29fe21887ab4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.269997] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Created folder: Project (d636a91a492a4f538bc2fc8634f5fa14) in parent group-v353718. [ 1666.270221] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Creating folder: Instances. Parent ref: group-v354000. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1666.270457] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6d1dab3-6112-4d17-8d56-7e84a9eeef88 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.281152] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Created folder: Instances in parent group-v354000. [ 1666.281422] env[63297]: DEBUG oslo.service.loopingcall [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1666.281671] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1666.281900] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6204c06-97d5-4832-9c05-578e737b0577 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.304669] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1666.304669] env[63297]: value = "task-1698121" [ 1666.304669] env[63297]: _type = "Task" [ 1666.304669] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.313959] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698121, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.494994] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6d7e058e-8318-49ee-9a3f-4210ed2a8bad tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "c2362520-ed07-4124-aade-bb54830b0d54" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.908s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.554484] env[63297]: DEBUG nova.network.neutron [req-06c33fa6-a81c-42c4-b6f3-b2b04a0d5887 req-12e1865f-9c5d-4af7-9078-5f6fe805911a service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Updated VIF entry in instance network info cache for port d469619d-b568-437d-8023-8d02e02b7350. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1666.554849] env[63297]: DEBUG nova.network.neutron [req-06c33fa6-a81c-42c4-b6f3-b2b04a0d5887 req-12e1865f-9c5d-4af7-9078-5f6fe805911a service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Updating instance_info_cache with network_info: [{"id": "d469619d-b568-437d-8023-8d02e02b7350", "address": "fa:16:3e:4a:1e:b1", "network": {"id": "b4d7c5a5-3ce8-4260-adec-a4bfc26133b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1616270477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d636a91a492a4f538bc2fc8634f5fa14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd469619d-b5", "ovs_interfaceid": "d469619d-b568-437d-8023-8d02e02b7350", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.583105] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698118, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63831} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.583725] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] f87867c3-58d4-4bd6-b6ef-1608ebef6b22/f87867c3-58d4-4bd6-b6ef-1608ebef6b22.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1666.583993] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1666.587018] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02b3e88d-0e4f-41b6-bb4e-80b30725f2ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.592100] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1666.592100] env[63297]: value = "task-1698122" [ 1666.592100] env[63297]: _type = "Task" [ 1666.592100] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.605746] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698122, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.779441] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6fc488-a507-4b2a-9c53-a8a531dc8914 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.788343] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc76f17-2f0b-42db-bfbb-5be77bc99299 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.835755] env[63297]: DEBUG nova.network.neutron [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Successfully updated port: 51feb81a-d695-4671-800d-b58470af4ae2 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1666.841156] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40358284-a439-42cd-9248-e14637ce61e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.845303] env[63297]: DEBUG nova.network.neutron [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance_info_cache with network_info: [{"id": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "address": "fa:16:3e:23:ee:4c", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c6ab49-f8", "ovs_interfaceid": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.853787] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698121, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.855053] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85c2a92-da4d-4de0-90a7-a7fb2cbee277 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.870039] env[63297]: DEBUG nova.compute.provider_tree [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1667.060436] env[63297]: DEBUG oslo_concurrency.lockutils [req-06c33fa6-a81c-42c4-b6f3-b2b04a0d5887 req-12e1865f-9c5d-4af7-9078-5f6fe805911a service nova] Releasing lock "refresh_cache-3ab25962-2150-4331-a018-aa61bd082814" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.102658] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698122, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067988} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.102963] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1667.103773] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ccb44f-57c8-4ca4-ac5e-71579cc0e10a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.130295] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] f87867c3-58d4-4bd6-b6ef-1608ebef6b22/f87867c3-58d4-4bd6-b6ef-1608ebef6b22.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1667.131431] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0b5e06eb-0891-4875-8d01-9b54c42f4eea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "interface-66b7a1e5-5e74-49db-99f3-4427d7297bf2-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.131673] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0b5e06eb-0891-4875-8d01-9b54c42f4eea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-66b7a1e5-5e74-49db-99f3-4427d7297bf2-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.131978] env[63297]: DEBUG nova.objects.instance [None req-0b5e06eb-0891-4875-8d01-9b54c42f4eea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'flavor' on Instance uuid 66b7a1e5-5e74-49db-99f3-4427d7297bf2 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1667.133323] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93e82125-a088-4593-bb55-8b6ea9442651 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.149499] env[63297]: DEBUG nova.objects.instance [None req-0b5e06eb-0891-4875-8d01-9b54c42f4eea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'pci_requests' on Instance uuid 66b7a1e5-5e74-49db-99f3-4427d7297bf2 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1667.156922] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1667.156922] env[63297]: value = "task-1698123" [ 1667.156922] env[63297]: _type = "Task" [ 1667.156922] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.167391] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698123, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.345469] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.345622] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.345775] env[63297]: DEBUG nova.network.neutron [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1667.346949] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698121, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.349043] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.349043] env[63297]: DEBUG nova.objects.instance [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'migration_context' on Instance uuid b1ed5d76-d358-49d3-a854-8f968bc987ad {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1667.373485] env[63297]: DEBUG nova.scheduler.client.report [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1667.435393] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "ffaa1402-5b51-4393-82c7-d9db964edfd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.435787] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "ffaa1402-5b51-4393-82c7-d9db964edfd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.652444] env[63297]: DEBUG nova.objects.base [None req-0b5e06eb-0891-4875-8d01-9b54c42f4eea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Object Instance<66b7a1e5-5e74-49db-99f3-4427d7297bf2> lazy-loaded attributes: flavor,pci_requests {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1667.652788] env[63297]: DEBUG nova.network.neutron [None req-0b5e06eb-0891-4875-8d01-9b54c42f4eea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1667.666425] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698123, 'name': ReconfigVM_Task, 'duration_secs': 0.44683} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.666662] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Reconfigured VM instance instance-00000065 to attach disk [datastore1] f87867c3-58d4-4bd6-b6ef-1608ebef6b22/f87867c3-58d4-4bd6-b6ef-1608ebef6b22.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1667.667317] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23ea216a-20ed-4df8-a218-31d797483cd9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.674040] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1667.674040] env[63297]: value = "task-1698124" [ 1667.674040] env[63297]: _type = "Task" [ 1667.674040] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.684018] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698124, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.777640] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0b5e06eb-0891-4875-8d01-9b54c42f4eea tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-66b7a1e5-5e74-49db-99f3-4427d7297bf2-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 0.646s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.850398] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698121, 'name': CreateVM_Task, 'duration_secs': 1.207265} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.851141] env[63297]: DEBUG nova.objects.base [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1667.851372] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1667.852484] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840aa33e-1d29-4129-bc57-b807feeb2a00 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.855523] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.855687] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.856541] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1667.856828] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c355cd55-66c5-4bdd-b0f3-ce85ad82f5cc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.861861] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1667.861861] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52141a32-f41c-4672-826e-c5bf5849ddd0" [ 1667.861861] env[63297]: _type = "Task" [ 1667.861861] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.878778] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-346f6d7c-343b-49f8-9424-a2c4fb04944e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.883619] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.918s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.885722] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.147s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.885914] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.887588] env[63297]: DEBUG oslo_concurrency.lockutils [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.846s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.887810] env[63297]: DEBUG nova.objects.instance [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lazy-loading 'resources' on Instance uuid d70bfe65-5faa-4248-9119-9a38259cb418 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1667.896603] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52141a32-f41c-4672-826e-c5bf5849ddd0, 'name': SearchDatastore_Task, 'duration_secs': 0.015972} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.897474] env[63297]: DEBUG nova.network.neutron [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1667.900741] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.901013] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1667.901230] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.901378] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.901559] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1667.901957] env[63297]: DEBUG oslo_vmware.api [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1667.901957] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fd90ae-c5ec-fd95-ef78-6b8347652bef" [ 1667.901957] env[63297]: _type = "Task" [ 1667.901957] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.902857] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b201d1f-302c-4b8c-983a-b6ffb42f9d29 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.910414] env[63297]: DEBUG nova.compute.manager [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Received event network-vif-plugged-51feb81a-d695-4671-800d-b58470af4ae2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1667.910639] env[63297]: DEBUG oslo_concurrency.lockutils [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] Acquiring lock "d7dc0672-c908-418e-bfcb-8daa761fba37-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.911357] env[63297]: DEBUG oslo_concurrency.lockutils [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] Lock "d7dc0672-c908-418e-bfcb-8daa761fba37-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.911357] env[63297]: DEBUG oslo_concurrency.lockutils [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] Lock "d7dc0672-c908-418e-bfcb-8daa761fba37-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.911357] env[63297]: DEBUG nova.compute.manager [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] No waiting events found dispatching network-vif-plugged-51feb81a-d695-4671-800d-b58470af4ae2 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1667.911357] env[63297]: WARNING nova.compute.manager [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Received unexpected event network-vif-plugged-51feb81a-d695-4671-800d-b58470af4ae2 for instance with vm_state building and task_state spawning. [ 1667.911584] env[63297]: DEBUG nova.compute.manager [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Received event network-changed-51feb81a-d695-4671-800d-b58470af4ae2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1667.911640] env[63297]: DEBUG nova.compute.manager [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Refreshing instance network info cache due to event network-changed-51feb81a-d695-4671-800d-b58470af4ae2. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1667.911803] env[63297]: DEBUG oslo_concurrency.lockutils [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] Acquiring lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.918708] env[63297]: DEBUG oslo_vmware.api [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fd90ae-c5ec-fd95-ef78-6b8347652bef, 'name': SearchDatastore_Task, 'duration_secs': 0.010024} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.919627] env[63297]: INFO nova.scheduler.client.report [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted allocations for instance c89d23b7-deb0-4394-9a42-2ac3990da98d [ 1667.924302] env[63297]: INFO nova.scheduler.client.report [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted allocations for instance a6d86e78-ae24-4e70-9fb2-270177b40322 [ 1667.927420] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.932234] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1667.932234] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1667.932743] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b3ae620-2023-4263-bf80-79babcd34473 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.938492] env[63297]: DEBUG nova.compute.manager [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1667.941026] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1667.941026] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5229b185-a03e-01c7-5bc3-d11d7f3ba8b9" [ 1667.941026] env[63297]: _type = "Task" [ 1667.941026] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.950793] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5229b185-a03e-01c7-5bc3-d11d7f3ba8b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.144111] env[63297]: DEBUG nova.network.neutron [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance_info_cache with network_info: [{"id": "51feb81a-d695-4671-800d-b58470af4ae2", "address": "fa:16:3e:55:69:f5", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51feb81a-d6", "ovs_interfaceid": "51feb81a-d695-4671-800d-b58470af4ae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1668.183810] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698124, 'name': Rename_Task, 'duration_secs': 0.186402} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.184253] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1668.184414] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-932f121a-f3dd-4f57-9e9f-da7b69312cf2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.190620] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1668.190620] env[63297]: value = "task-1698125" [ 1668.190620] env[63297]: _type = "Task" [ 1668.190620] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.198192] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698125, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.426319] env[63297]: DEBUG oslo_concurrency.lockutils [None req-aca660cd-8228-41b0-941f-f89e83382415 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "c89d23b7-deb0-4394-9a42-2ac3990da98d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.316s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.435516] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71391369-b277-4a21-b6dd-b8e35808cbc8 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "a6d86e78-ae24-4e70-9fb2-270177b40322" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.635s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.460086] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5229b185-a03e-01c7-5bc3-d11d7f3ba8b9, 'name': SearchDatastore_Task, 'duration_secs': 0.013472} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.461126] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4fdcb44-5daa-4388-9e4e-2f76b2b2f2e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.464536] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.467651] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1668.467651] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527cc243-068d-e4f0-28a1-4d3c1fce0bb8" [ 1668.467651] env[63297]: _type = "Task" [ 1668.467651] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.478062] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527cc243-068d-e4f0-28a1-4d3c1fce0bb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.600390] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b72d84-c14c-44dc-8bf6-755c41348d29 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.608403] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872429a7-797c-4b01-8524-78002904c765 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.640533] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55532833-211a-40fa-a7ae-29e7f4eae378 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.647636] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.647944] env[63297]: DEBUG nova.compute.manager [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Instance network_info: |[{"id": "51feb81a-d695-4671-800d-b58470af4ae2", "address": "fa:16:3e:55:69:f5", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51feb81a-d6", "ovs_interfaceid": "51feb81a-d695-4671-800d-b58470af4ae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1668.648334] env[63297]: DEBUG oslo_concurrency.lockutils [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] Acquired lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.648517] env[63297]: DEBUG nova.network.neutron [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Refreshing network info cache for port 51feb81a-d695-4671-800d-b58470af4ae2 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1668.649675] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:69:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51feb81a-d695-4671-800d-b58470af4ae2', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1668.657012] env[63297]: DEBUG oslo.service.loopingcall [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1668.658194] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1841e380-0a94-425e-b2aa-a6991ede64b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.663056] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1668.663289] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40318db2-dc1e-412e-a741-2b4149705793 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.687197] env[63297]: DEBUG nova.compute.provider_tree [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1668.689605] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1668.689605] env[63297]: value = "task-1698126" [ 1668.689605] env[63297]: _type = "Task" [ 1668.689605] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.704633] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698125, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.704633] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698126, 'name': CreateVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.978492] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527cc243-068d-e4f0-28a1-4d3c1fce0bb8, 'name': SearchDatastore_Task, 'duration_secs': 0.016825} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.978833] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.979165] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 3ab25962-2150-4331-a018-aa61bd082814/3ab25962-2150-4331-a018-aa61bd082814.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1668.979444] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0997f00-a68a-409a-b3da-1667838b3710 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.987774] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1668.987774] env[63297]: value = "task-1698127" [ 1668.987774] env[63297]: _type = "Task" [ 1668.987774] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.996584] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698127, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.191310] env[63297]: DEBUG nova.scheduler.client.report [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1669.216268] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698126, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.216580] env[63297]: DEBUG oslo_vmware.api [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698125, 'name': PowerOnVM_Task, 'duration_secs': 0.684109} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.216836] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1669.217048] env[63297]: INFO nova.compute.manager [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Took 7.82 seconds to spawn the instance on the hypervisor. [ 1669.217229] env[63297]: DEBUG nova.compute.manager [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1669.218251] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41288eff-c2c5-4970-8f3e-b49b51065785 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.498316] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698127, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.533080] env[63297]: DEBUG nova.network.neutron [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updated VIF entry in instance network info cache for port 51feb81a-d695-4671-800d-b58470af4ae2. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1669.533461] env[63297]: DEBUG nova.network.neutron [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance_info_cache with network_info: [{"id": "51feb81a-d695-4671-800d-b58470af4ae2", "address": "fa:16:3e:55:69:f5", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51feb81a-d6", "ovs_interfaceid": "51feb81a-d695-4671-800d-b58470af4ae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.653388] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "interface-66b7a1e5-5e74-49db-99f3-4427d7297bf2-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.653738] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-66b7a1e5-5e74-49db-99f3-4427d7297bf2-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.654685] env[63297]: DEBUG nova.objects.instance [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'flavor' on Instance uuid 66b7a1e5-5e74-49db-99f3-4427d7297bf2 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1669.702192] env[63297]: DEBUG oslo_concurrency.lockutils [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.814s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.708297] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.781s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.710264] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698126, 'name': CreateVM_Task, 'duration_secs': 0.643296} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.710689] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1669.711452] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.711659] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.711994] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1669.712358] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f3aa88a-3ba2-4f7d-9457-1e0ace7a989e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.718110] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1669.718110] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527f642d-d272-b544-2957-f194cbc95e84" [ 1669.718110] env[63297]: _type = "Task" [ 1669.718110] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.728347] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527f642d-d272-b544-2957-f194cbc95e84, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.729663] env[63297]: INFO nova.scheduler.client.report [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Deleted allocations for instance d70bfe65-5faa-4248-9119-9a38259cb418 [ 1669.738999] env[63297]: INFO nova.compute.manager [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Took 13.84 seconds to build instance. [ 1669.999388] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.914343} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.999560] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 3ab25962-2150-4331-a018-aa61bd082814/3ab25962-2150-4331-a018-aa61bd082814.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1669.999825] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1669.999960] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f010b47c-ac1d-4897-9109-a0d0487cddc1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.006991] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1670.006991] env[63297]: value = "task-1698128" [ 1670.006991] env[63297]: _type = "Task" [ 1670.006991] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.015776] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698128, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.035774] env[63297]: DEBUG oslo_concurrency.lockutils [req-84ed0aa5-0de6-43f0-b76b-7ecc7d5cada2 req-7d0ea4c8-721a-4fd0-9687-198416b301c8 service nova] Releasing lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.230766] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527f642d-d272-b544-2957-f194cbc95e84, 'name': SearchDatastore_Task, 'duration_secs': 0.050269} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.231229] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.231322] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1670.231540] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.231684] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.231893] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1670.232176] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1d50962-a3b6-4209-999e-7f73faacffd9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.241664] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.241924] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.245141] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bc78b700-c1a9-45fb-a665-0857eb2eee62 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.356s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.245540] env[63297]: DEBUG oslo_concurrency.lockutils [None req-974bb023-13a4-466e-a3ed-0d6beac74143 tempest-ServerShowV254Test-500090806 tempest-ServerShowV254Test-500090806-project-member] Lock "d70bfe65-5faa-4248-9119-9a38259cb418" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.935s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.251461] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1670.251642] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1670.252811] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2db40fc5-0df5-401f-95f7-cef933a032e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.266426] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1670.266426] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526025f7-448a-7dc1-ce3c-2fb021254589" [ 1670.266426] env[63297]: _type = "Task" [ 1670.266426] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.281062] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526025f7-448a-7dc1-ce3c-2fb021254589, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.410667] env[63297]: DEBUG nova.objects.instance [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'pci_requests' on Instance uuid 66b7a1e5-5e74-49db-99f3-4427d7297bf2 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1670.452807] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec236d9-9572-4f3b-9b3a-d181682bb4b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.461855] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e23764-f34f-4ab3-bbf0-b3470ee0b0ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.493039] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d10d6c-3e58-4361-a0b5-745c8fc720d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.501493] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444a7d0d-da10-4b8a-8bcc-82e2b0469b76 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.518890] env[63297]: DEBUG nova.compute.provider_tree [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1670.525177] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698128, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063533} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.525399] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1670.526330] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73409861-3bef-4089-87c3-d552c3f04c7c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.551796] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 3ab25962-2150-4331-a018-aa61bd082814/3ab25962-2150-4331-a018-aa61bd082814.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1670.552374] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66e193be-c832-4dad-957c-7ecbbdf9f024 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.574020] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1670.574020] env[63297]: value = "task-1698129" [ 1670.574020] env[63297]: _type = "Task" [ 1670.574020] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.582071] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698129, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.737880] env[63297]: DEBUG nova.compute.manager [req-53254a5f-8e7b-4868-a241-64e48531e994 req-1c8a9fa7-b69a-42ba-a483-d636842f75d8 service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Received event network-changed-8b0d7883-16da-4bdb-b728-dbcd6772ccdb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1670.738093] env[63297]: DEBUG nova.compute.manager [req-53254a5f-8e7b-4868-a241-64e48531e994 req-1c8a9fa7-b69a-42ba-a483-d636842f75d8 service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Refreshing instance network info cache due to event network-changed-8b0d7883-16da-4bdb-b728-dbcd6772ccdb. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1670.738308] env[63297]: DEBUG oslo_concurrency.lockutils [req-53254a5f-8e7b-4868-a241-64e48531e994 req-1c8a9fa7-b69a-42ba-a483-d636842f75d8 service nova] Acquiring lock "refresh_cache-f87867c3-58d4-4bd6-b6ef-1608ebef6b22" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.738451] env[63297]: DEBUG oslo_concurrency.lockutils [req-53254a5f-8e7b-4868-a241-64e48531e994 req-1c8a9fa7-b69a-42ba-a483-d636842f75d8 service nova] Acquired lock "refresh_cache-f87867c3-58d4-4bd6-b6ef-1608ebef6b22" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.738608] env[63297]: DEBUG nova.network.neutron [req-53254a5f-8e7b-4868-a241-64e48531e994 req-1c8a9fa7-b69a-42ba-a483-d636842f75d8 service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Refreshing network info cache for port 8b0d7883-16da-4bdb-b728-dbcd6772ccdb {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1670.746918] env[63297]: DEBUG nova.compute.manager [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1670.777226] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526025f7-448a-7dc1-ce3c-2fb021254589, 'name': SearchDatastore_Task, 'duration_secs': 0.030723} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.778081] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cf9c369-e6bc-481e-ba17-8fea66c0d2d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.784391] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1670.784391] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527975c4-d49e-959e-6f94-7d17c365a51f" [ 1670.784391] env[63297]: _type = "Task" [ 1670.784391] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.793080] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527975c4-d49e-959e-6f94-7d17c365a51f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.913388] env[63297]: DEBUG nova.objects.base [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Object Instance<66b7a1e5-5e74-49db-99f3-4427d7297bf2> lazy-loaded attributes: flavor,pci_requests {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1670.913590] env[63297]: DEBUG nova.network.neutron [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1670.966163] env[63297]: DEBUG nova.policy [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c21dc2049dd84f87a3b7cbcd7ba0ebcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48bfb708de5c4dd287530be2f8483ca9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1671.023194] env[63297]: DEBUG nova.scheduler.client.report [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1671.083328] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698129, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.272211] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.297045] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527975c4-d49e-959e-6f94-7d17c365a51f, 'name': SearchDatastore_Task, 'duration_secs': 0.041548} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.297337] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.297596] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d7dc0672-c908-418e-bfcb-8daa761fba37/d7dc0672-c908-418e-bfcb-8daa761fba37.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1671.297910] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33c42e22-e83b-4002-84b7-37cfc8a23581 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.304821] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1671.304821] env[63297]: value = "task-1698130" [ 1671.304821] env[63297]: _type = "Task" [ 1671.304821] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.313729] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698130, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.423961] env[63297]: DEBUG nova.network.neutron [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Successfully created port: a5af9937-1640-481b-b998-9090c09fa6e0 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1671.535824] env[63297]: DEBUG oslo_vmware.rw_handles [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a343ac-bf5f-bd20-7b52-c6bc0ae6f03c/disk-0.vmdk. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1671.537536] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839e26d2-ec45-4365-9ed0-8ddbeb374ce9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.544213] env[63297]: DEBUG oslo_vmware.rw_handles [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a343ac-bf5f-bd20-7b52-c6bc0ae6f03c/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1671.544213] env[63297]: ERROR oslo_vmware.rw_handles [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a343ac-bf5f-bd20-7b52-c6bc0ae6f03c/disk-0.vmdk due to incomplete transfer. [ 1671.544213] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f5d154f9-fdd5-4203-bed4-435366b903eb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.554408] env[63297]: DEBUG oslo_vmware.rw_handles [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a343ac-bf5f-bd20-7b52-c6bc0ae6f03c/disk-0.vmdk. {{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1671.555816] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Uploaded image 6d9483fb-3122-4c2b-800b-dca528822bb1 to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1671.557781] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1671.558146] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b760037e-3849-4fcc-a99a-df81b822eca7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.568249] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1671.568249] env[63297]: value = "task-1698131" [ 1671.568249] env[63297]: _type = "Task" [ 1671.568249] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.583460] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698129, 'name': ReconfigVM_Task, 'duration_secs': 0.87053} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.586619] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 3ab25962-2150-4331-a018-aa61bd082814/3ab25962-2150-4331-a018-aa61bd082814.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1671.587414] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698131, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.587696] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-219c0a8a-47c4-4b73-8879-4f8cadea508d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.595963] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1671.595963] env[63297]: value = "task-1698132" [ 1671.595963] env[63297]: _type = "Task" [ 1671.595963] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.606057] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698132, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.750364] env[63297]: DEBUG nova.network.neutron [req-53254a5f-8e7b-4868-a241-64e48531e994 req-1c8a9fa7-b69a-42ba-a483-d636842f75d8 service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Updated VIF entry in instance network info cache for port 8b0d7883-16da-4bdb-b728-dbcd6772ccdb. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1671.750761] env[63297]: DEBUG nova.network.neutron [req-53254a5f-8e7b-4868-a241-64e48531e994 req-1c8a9fa7-b69a-42ba-a483-d636842f75d8 service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Updating instance_info_cache with network_info: [{"id": "8b0d7883-16da-4bdb-b728-dbcd6772ccdb", "address": "fa:16:3e:b7:04:f1", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b0d7883-16", "ovs_interfaceid": "8b0d7883-16da-4bdb-b728-dbcd6772ccdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.821526] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698130, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.042654] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.333s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.042654] env[63297]: DEBUG nova.compute.manager [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=63297) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4910}} [ 1672.044964] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.580s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.046840] env[63297]: INFO nova.compute.claims [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1672.079315] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698131, 'name': Destroy_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.106091] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698132, 'name': Rename_Task, 'duration_secs': 0.343704} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.106369] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1672.106612] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e0f0144-cd18-4e8f-938c-a1332be41d59 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.113703] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1672.113703] env[63297]: value = "task-1698133" [ 1672.113703] env[63297]: _type = "Task" [ 1672.113703] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.122901] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698133, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.254156] env[63297]: DEBUG oslo_concurrency.lockutils [req-53254a5f-8e7b-4868-a241-64e48531e994 req-1c8a9fa7-b69a-42ba-a483-d636842f75d8 service nova] Releasing lock "refresh_cache-f87867c3-58d4-4bd6-b6ef-1608ebef6b22" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.316586] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698130, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534083} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.316911] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d7dc0672-c908-418e-bfcb-8daa761fba37/d7dc0672-c908-418e-bfcb-8daa761fba37.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1672.317072] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1672.317326] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77060abc-d4b7-4c7f-b674-82ee6620bbe8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.324966] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1672.324966] env[63297]: value = "task-1698134" [ 1672.324966] env[63297]: _type = "Task" [ 1672.324966] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.332837] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698134, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.580616] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698131, 'name': Destroy_Task} progress is 33%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.608061] env[63297]: INFO nova.scheduler.client.report [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleted allocation for migration a3981554-81b6-4a57-8746-6c355d8fcc2f [ 1672.624039] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698133, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.835032] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698134, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062883} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.835032] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1672.835634] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0a7335-55d3-4c42-ab6d-a8763e4f635e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.857740] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] d7dc0672-c908-418e-bfcb-8daa761fba37/d7dc0672-c908-418e-bfcb-8daa761fba37.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1672.858095] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9859961a-6b6f-443e-94fe-745bc9ee32c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.877323] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1672.877323] env[63297]: value = "task-1698135" [ 1672.877323] env[63297]: _type = "Task" [ 1672.877323] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.884752] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698135, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.082922] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698131, 'name': Destroy_Task, 'duration_secs': 1.192854} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.083539] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Destroyed the VM [ 1673.083802] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1673.084130] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a1435601-7d76-406d-9f44-02516589402a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.092235] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1673.092235] env[63297]: value = "task-1698136" [ 1673.092235] env[63297]: _type = "Task" [ 1673.092235] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.103487] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698136, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.114772] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ff162341-1865-4a55-9747-08f6483bad46 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.333s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.119237] env[63297]: DEBUG nova.compute.manager [req-40173789-bb0e-49c1-9504-6462fb9bce60 req-a8d2204f-d25c-4d2f-af66-83c61faab4f1 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received event network-vif-plugged-a5af9937-1640-481b-b998-9090c09fa6e0 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1673.119237] env[63297]: DEBUG oslo_concurrency.lockutils [req-40173789-bb0e-49c1-9504-6462fb9bce60 req-a8d2204f-d25c-4d2f-af66-83c61faab4f1 service nova] Acquiring lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.119237] env[63297]: DEBUG oslo_concurrency.lockutils [req-40173789-bb0e-49c1-9504-6462fb9bce60 req-a8d2204f-d25c-4d2f-af66-83c61faab4f1 service nova] Lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.119237] env[63297]: DEBUG oslo_concurrency.lockutils [req-40173789-bb0e-49c1-9504-6462fb9bce60 req-a8d2204f-d25c-4d2f-af66-83c61faab4f1 service nova] Lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.119237] env[63297]: DEBUG nova.compute.manager [req-40173789-bb0e-49c1-9504-6462fb9bce60 req-a8d2204f-d25c-4d2f-af66-83c61faab4f1 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] No waiting events found dispatching network-vif-plugged-a5af9937-1640-481b-b998-9090c09fa6e0 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1673.119237] env[63297]: WARNING nova.compute.manager [req-40173789-bb0e-49c1-9504-6462fb9bce60 req-a8d2204f-d25c-4d2f-af66-83c61faab4f1 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received unexpected event network-vif-plugged-a5af9937-1640-481b-b998-9090c09fa6e0 for instance with vm_state active and task_state None. [ 1673.135374] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698133, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.259143] env[63297]: DEBUG nova.network.neutron [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Successfully updated port: a5af9937-1640-481b-b998-9090c09fa6e0 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1673.311100] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c1d9f6-5ec5-4211-bb69-04e06ec1fd1e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.319635] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c75d38-9a58-4497-9785-5615ae74df8a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.352307] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0e5e3c-1ed7-4c47-9831-caeadc08fc5f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.360175] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1ed1c4-d541-4b3a-8dda-a9fef433ef57 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.374543] env[63297]: DEBUG nova.compute.provider_tree [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1673.387474] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698135, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.605158] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698136, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.637979] env[63297]: DEBUG oslo_vmware.api [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698133, 'name': PowerOnVM_Task, 'duration_secs': 1.392338} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.638285] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1673.638489] env[63297]: INFO nova.compute.manager [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Took 9.89 seconds to spawn the instance on the hypervisor. [ 1673.638665] env[63297]: DEBUG nova.compute.manager [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1673.639524] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5d4ff1-ec6f-49cc-a7da-b8896b507643 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.762600] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.762815] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.763094] env[63297]: DEBUG nova.network.neutron [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1673.877964] env[63297]: DEBUG nova.scheduler.client.report [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1673.890647] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698135, 'name': ReconfigVM_Task, 'duration_secs': 0.538173} completed 
successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.890873] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Reconfigured VM instance instance-00000067 to attach disk [datastore1] d7dc0672-c908-418e-bfcb-8daa761fba37/d7dc0672-c908-418e-bfcb-8daa761fba37.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1673.891405] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70042039-e124-49a6-8b77-6768a17166b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.898899] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1673.898899] env[63297]: value = "task-1698137" [ 1673.898899] env[63297]: _type = "Task" [ 1673.898899] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.910010] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698137, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.103747] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698136, 'name': RemoveSnapshot_Task, 'duration_secs': 0.587515} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.104025] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1674.104322] env[63297]: DEBUG nova.compute.manager [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1674.105125] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f742ed-131b-4855-9371-e7e1b2bcb050 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.136055] env[63297]: DEBUG nova.objects.instance [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'flavor' on Instance uuid b1ed5d76-d358-49d3-a854-8f968bc987ad {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1674.158343] env[63297]: INFO nova.compute.manager [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Took 17.08 seconds to build instance. [ 1674.386289] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.386830] env[63297]: DEBUG nova.compute.manager [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1674.389374] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.117s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.390768] env[63297]: INFO nova.compute.claims [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1674.410011] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698137, 'name': Rename_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.458471] env[63297]: WARNING nova.network.neutron [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] 250ab20f-3057-41ed-bb65-926464a4e926 already exists in list: networks containing: ['250ab20f-3057-41ed-bb65-926464a4e926']. ignoring it [ 1674.619140] env[63297]: INFO nova.compute.manager [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Shelve offloading [ 1674.620804] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1674.621075] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c5a59bfb-1cb4-4b08-af25-bec3a1b4d12d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.628732] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1674.628732] env[63297]: value = "task-1698138" [ 1674.628732] env[63297]: _type = "Task" [ 1674.628732] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.636433] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698138, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.639603] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.639795] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.639924] env[63297]: DEBUG nova.network.neutron [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1674.640111] env[63297]: DEBUG nova.objects.instance [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'info_cache' on Instance uuid b1ed5d76-d358-49d3-a854-8f968bc987ad {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1674.661253] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9158656-cbec-4e4f-8d39-087981e95334 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "3ab25962-2150-4331-a018-aa61bd082814" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.595s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.836519] env[63297]: DEBUG nova.network.neutron [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updating instance_info_cache with network_info: [{"id": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "address": "fa:16:3e:08:14:f1", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95dd8f-a3", "ovs_interfaceid": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a5af9937-1640-481b-b998-9090c09fa6e0", "address": "fa:16:3e:f1:dd:17", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5af9937-16", "ovs_interfaceid": "a5af9937-1640-481b-b998-9090c09fa6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.894525] env[63297]: DEBUG nova.compute.utils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1674.897628] env[63297]: DEBUG nova.compute.manager [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1674.897798] env[63297]: DEBUG nova.network.neutron [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1674.910768] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698137, 'name': Rename_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.962461] env[63297]: DEBUG nova.policy [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0da2fdb3c81747698f971951c5e0068b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efc8039a70b34a269d3aed1ecb558b7e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1675.139499] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1675.139708] env[63297]: DEBUG nova.compute.manager [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1675.140530] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f501b068-91a2-43fc-83f4-ab7a3f494b2d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.144293] env[63297]: DEBUG nova.objects.base [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1675.148650] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.148809] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.148983] env[63297]: DEBUG nova.network.neutron [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1675.339087] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock 
"refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.339843] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.339999] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.340860] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545c98b8-7845-484d-a965-dbd450b923d4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.366454] env[63297]: DEBUG nova.virt.hardware [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1675.366454] env[63297]: DEBUG nova.virt.hardware [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1675.366454] env[63297]: DEBUG nova.virt.hardware [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1675.366734] env[63297]: DEBUG nova.virt.hardware [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1675.366784] env[63297]: DEBUG nova.virt.hardware [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1675.366895] env[63297]: DEBUG nova.virt.hardware [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1675.367109] env[63297]: DEBUG nova.virt.hardware [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1675.367263] env[63297]: DEBUG nova.virt.hardware [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1675.367422] env[63297]: DEBUG nova.virt.hardware [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1675.367579] env[63297]: DEBUG nova.virt.hardware [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1675.367749] env[63297]: DEBUG nova.virt.hardware [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1675.375656] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Reconfiguring VM to attach interface {{(pid=63297) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1675.375875] env[63297]: DEBUG nova.network.neutron [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Successfully created port: dc0af285-6a18-4cb7-b669-1b9a78865789 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1675.378246] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-007657f7-4268-4a9f-96ab-095f329a2ad8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.396430] env[63297]: DEBUG oslo_vmware.api [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1675.396430] env[63297]: value = "task-1698139" [ 1675.396430] env[63297]: _type = "Task" [ 1675.396430] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.403114] env[63297]: DEBUG nova.compute.manager [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1675.409210] env[63297]: DEBUG oslo_vmware.api [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698139, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.418594] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698137, 'name': Rename_Task, 'duration_secs': 1.391278} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.418854] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1675.419103] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d3f8084-100a-4745-ae64-2b62c89857fa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.425570] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1675.425570] env[63297]: value = "task-1698140" [ 1675.425570] env[63297]: _type = "Task" [ 1675.425570] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.437128] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698140, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.573399] env[63297]: DEBUG nova.compute.manager [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received event network-changed-a5af9937-1640-481b-b998-9090c09fa6e0 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1675.573600] env[63297]: DEBUG nova.compute.manager [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Refreshing instance network info cache due to event network-changed-a5af9937-1640-481b-b998-9090c09fa6e0. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1675.573784] env[63297]: DEBUG oslo_concurrency.lockutils [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] Acquiring lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.573943] env[63297]: DEBUG oslo_concurrency.lockutils [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] Acquired lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.574319] env[63297]: DEBUG nova.network.neutron [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Refreshing network info cache for port a5af9937-1640-481b-b998-9090c09fa6e0 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1675.669404] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9440fa0-6218-45a1-9541-72be2c9c1900 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.678359] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5852c76-8b95-4426-b51a-72ada7ef05e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.719014] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c482661c-678f-4753-8fbb-dc1134d0e000 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.728344] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a57bfad-2513-4801-a8d2-cd98897fade8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.742458] env[63297]: DEBUG nova.compute.provider_tree [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.913723] env[63297]: DEBUG oslo_vmware.api [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698139, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.926995] env[63297]: DEBUG nova.network.neutron [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updating instance_info_cache with network_info: [{"id": "bb862c99-f006-416a-9b98-0fb287a5d194", "address": "fa:16:3e:03:4f:8d", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb862c99-f0", "ovs_interfaceid": "bb862c99-f006-416a-9b98-0fb287a5d194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.939610] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698140, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.089676] env[63297]: DEBUG nova.network.neutron [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance_info_cache with network_info: [{"id": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "address": "fa:16:3e:23:ee:4c", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c6ab49-f8", "ovs_interfaceid": "c8c6ab49-f882-4349-bddd-cfb1a972afc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.246911] env[63297]: DEBUG nova.scheduler.client.report [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1676.302934] env[63297]: DEBUG nova.network.neutron [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updated VIF entry in instance network info cache for port a5af9937-1640-481b-b998-9090c09fa6e0. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1676.303433] env[63297]: DEBUG nova.network.neutron [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updating instance_info_cache with network_info: [{"id": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "address": "fa:16:3e:08:14:f1", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95dd8f-a3", "ovs_interfaceid": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a5af9937-1640-481b-b998-9090c09fa6e0", "address": "fa:16:3e:f1:dd:17", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5af9937-16", "ovs_interfaceid": "a5af9937-1640-481b-b998-9090c09fa6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.410914] env[63297]: DEBUG oslo_vmware.api [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698139, 'name': ReconfigVM_Task, 'duration_secs': 0.63817} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.411490] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.411712] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Reconfigured VM to attach interface {{(pid=63297) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1676.416113] env[63297]: DEBUG nova.compute.manager [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1676.435374] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.441753] env[63297]: DEBUG oslo_vmware.api [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698140, 'name': PowerOnVM_Task, 'duration_secs': 0.553815} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.442345] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1676.442459] env[63297]: INFO nova.compute.manager [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Took 10.29 seconds to spawn the instance on the hypervisor. 
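The wait_for_task / _poll_task records above (task-1698139 ReconfigVM_Task, task-1698140 PowerOnVM_Task) show the same shape each time: register interest in a task, poll its progress percentage, and stop once it reports success along with a duration_secs. The following minimal Python sketch only illustrates that poll-until-terminal-state pattern; TaskInfo and get_task_info are hypothetical stand-ins and this is not oslo.vmware's actual wait_for_task implementation.

# Illustrative sketch of the polling pattern seen in the records above.
# Not oslo.vmware code: TaskInfo and get_task_info are assumed names.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    task_id: str
    name: str
    state: str          # e.g. "running", "success", "error"
    progress: int       # percent complete
    error: str | None = None

def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reaches a terminal state, reporting progress."""
    started = time.monotonic()
    deadline = started + timeout
    while True:
        info = get_task_info(task_id)   # one property-retrieval round trip per poll
        if info.state == "success":
            duration = time.monotonic() - started
            print(f"Task {info.task_id} ({info.name}) completed in {duration:.3f}s")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {info.task_id} failed: {info.error}")
        print(f"Task {info.task_id} ({info.name}) progress is {info.progress}%")
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {info.task_id} did not finish within {timeout}s")
        time.sleep(poll_interval)

The 0% / 14% / 100% progress lines in the log correspond to successive iterations of a loop like this, with the final record adding the measured duration.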
[ 1676.442637] env[63297]: DEBUG nova.compute.manager [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1676.443510] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25bf1047-dfa2-4355-9261-366bdbc1b4b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.448085] env[63297]: DEBUG nova.virt.hardware [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1676.448303] env[63297]: DEBUG nova.virt.hardware [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1676.448464] env[63297]: DEBUG nova.virt.hardware [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1676.448647] env[63297]: DEBUG nova.virt.hardware [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1676.448793] env[63297]: DEBUG nova.virt.hardware [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1676.448941] env[63297]: DEBUG nova.virt.hardware [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1676.449157] env[63297]: DEBUG nova.virt.hardware [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1676.449312] env[63297]: DEBUG nova.virt.hardware [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1676.449471] env[63297]: DEBUG nova.virt.hardware [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1676.449626] env[63297]: DEBUG nova.virt.hardware [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1676.449789] env[63297]: DEBUG nova.virt.hardware [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1676.450875] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08106e19-37e4-439b-a98f-3a92594ef3fa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.462870] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbf833f-0071-49e7-82d9-e47bf0942ad3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.593253] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-b1ed5d76-d358-49d3-a854-8f968bc987ad" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.750873] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.751424] env[63297]: DEBUG nova.compute.manager [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1676.806050] env[63297]: DEBUG oslo_concurrency.lockutils [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] Releasing lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.806433] env[63297]: DEBUG nova.compute.manager [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Received event network-changed-d469619d-b568-437d-8023-8d02e02b7350 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1676.806633] env[63297]: DEBUG nova.compute.manager [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Refreshing instance network info cache due to event network-changed-d469619d-b568-437d-8023-8d02e02b7350. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1676.806848] env[63297]: DEBUG oslo_concurrency.lockutils [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] Acquiring lock "refresh_cache-3ab25962-2150-4331-a018-aa61bd082814" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.806992] env[63297]: DEBUG oslo_concurrency.lockutils [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] Acquired lock "refresh_cache-3ab25962-2150-4331-a018-aa61bd082814" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.807175] env[63297]: DEBUG nova.network.neutron [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Refreshing network info cache for port d469619d-b568-437d-8023-8d02e02b7350 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1676.813226] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1676.814114] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc27b7d-9ad7-4dbf-a8b2-28db70c8773f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.822490] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1676.822739] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a89231a7-5824-4492-92f6-dcffe174b16f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.909683] env[63297]: DEBUG nova.compute.manager [req-46e57adc-150a-4b45-83c2-2fee02499aec 
req-44f50fea-19c5-4b9a-b9a6-08aec6c24642 service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Received event network-vif-unplugged-bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1676.909908] env[63297]: DEBUG oslo_concurrency.lockutils [req-46e57adc-150a-4b45-83c2-2fee02499aec req-44f50fea-19c5-4b9a-b9a6-08aec6c24642 service nova] Acquiring lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1676.910160] env[63297]: DEBUG oslo_concurrency.lockutils [req-46e57adc-150a-4b45-83c2-2fee02499aec req-44f50fea-19c5-4b9a-b9a6-08aec6c24642 service nova] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.910381] env[63297]: DEBUG oslo_concurrency.lockutils [req-46e57adc-150a-4b45-83c2-2fee02499aec req-44f50fea-19c5-4b9a-b9a6-08aec6c24642 service nova] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.910568] env[63297]: DEBUG nova.compute.manager [req-46e57adc-150a-4b45-83c2-2fee02499aec req-44f50fea-19c5-4b9a-b9a6-08aec6c24642 service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] No waiting events found dispatching network-vif-unplugged-bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1676.910770] env[63297]: WARNING nova.compute.manager [req-46e57adc-150a-4b45-83c2-2fee02499aec req-44f50fea-19c5-4b9a-b9a6-08aec6c24642 service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Received unexpected event network-vif-unplugged-bb862c99-f006-416a-9b98-0fb287a5d194 for instance with vm_state shelved and task_state shelving_offloading. 
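The oslo_concurrency.lockutils records just above follow a fixed three-step trace for the per-instance "427c4ff0-...-events" lock: Acquiring, acquired with the time waited, then released with the time held. The sketch below reproduces that trace shape with a plain threading.Lock purely for illustration; timed_lock and _locks are assumed names and this is not the real lockutils implementation.

# Illustrative sketch of the "Acquiring / acquired :: waited / released :: held"
# lock trace seen above. Not oslo.concurrency code; names are hypothetical.
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name: str, owner: str):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

# Usage in the spirit of the per-instance event lock above (uuid from the log):
with timed_lock("427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events", "pop_instance_event"):
    pass  # pop or record the pending external event while the lock is held

The waited/held figures in the log (0.000s here, several seconds for the earlier compute_resources claim) are exactly these two deltas.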
[ 1676.912118] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1676.912323] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1676.912500] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleting the datastore file [datastore1] 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1676.912742] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa0563f9-b612-4ad1-a9b9-3919ac3efdb1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.917141] env[63297]: DEBUG oslo_concurrency.lockutils [None req-6f47c693-7f31-4bcb-a59d-6648ee3138e6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-66b7a1e5-5e74-49db-99f3-4427d7297bf2-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.263s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.920193] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1676.920193] env[63297]: value = "task-1698142" [ 1676.920193] env[63297]: _type = "Task" [ 1676.920193] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.928951] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698142, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.972781] env[63297]: INFO nova.compute.manager [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Took 18.70 seconds to build instance. 
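The recurring nova.virt.hardware records in this section ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") describe enumerating the socket/core/thread factorizations of the flavor's vCPU count under the flavor and image maxima. The sketch below is only a rough approximation of that enumeration, not nova's actual _get_possible_cpu_topologies; VirtCPUTopology here is a stand-in namedtuple.

# Rough illustration of the topology enumeration described by the records above.
# Not nova's real code; VirtCPUTopology is a stand-in.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) combinations whose product is vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# For the 1-vCPU m1.nano flavor used in these tempest runs this yields exactly
# one topology, matching "Got 1 possible topologies":
print(possible_cpu_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]

With unconstrained maxima (65536) and a single vCPU the only factorization is 1:1:1, which is why every instance in this log settles on VirtCPUTopology(cores=1,sockets=1,threads=1).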
[ 1677.096312] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1677.096627] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a237a64f-a68c-41fb-a8f3-54aa97caf9b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.104922] env[63297]: DEBUG oslo_vmware.api [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1677.104922] env[63297]: value = "task-1698143" [ 1677.104922] env[63297]: _type = "Task" [ 1677.104922] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.113641] env[63297]: DEBUG oslo_vmware.api [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698143, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.256746] env[63297]: DEBUG nova.compute.utils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1677.258265] env[63297]: DEBUG nova.compute.manager [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1677.258344] env[63297]: DEBUG nova.network.neutron [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1677.261550] env[63297]: DEBUG nova.network.neutron [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Successfully updated port: dc0af285-6a18-4cb7-b669-1b9a78865789 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1677.304196] env[63297]: DEBUG nova.policy [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20a91144677b4efba8ab91acd53d1c04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c33733e0599840618625ecb3e6bb6029', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1677.430121] env[63297]: DEBUG oslo_vmware.api [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698142, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.381612} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.430429] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1677.430551] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1677.430719] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1677.455652] env[63297]: INFO nova.scheduler.client.report [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleted allocations for instance 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 [ 1677.475444] env[63297]: DEBUG oslo_concurrency.lockutils [None req-654b565e-50b5-4311-9fd0-07777e18e1a0 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "d7dc0672-c908-418e-bfcb-8daa761fba37" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.208s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.570220] env[63297]: DEBUG nova.network.neutron [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Successfully created port: 465bbc4d-f40c-47fa-bc69-986670184c65 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1677.586188] env[63297]: DEBUG nova.network.neutron [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Updated VIF entry in instance network info cache for port d469619d-b568-437d-8023-8d02e02b7350. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1677.586188] env[63297]: DEBUG nova.network.neutron [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Updating instance_info_cache with network_info: [{"id": "d469619d-b568-437d-8023-8d02e02b7350", "address": "fa:16:3e:4a:1e:b1", "network": {"id": "b4d7c5a5-3ce8-4260-adec-a4bfc26133b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1616270477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d636a91a492a4f538bc2fc8634f5fa14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd469619d-b5", "ovs_interfaceid": "d469619d-b568-437d-8023-8d02e02b7350", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.615306] env[63297]: DEBUG oslo_vmware.api [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698143, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.761268] env[63297]: DEBUG nova.compute.manager [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1677.767351] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1677.767499] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.767648] env[63297]: DEBUG nova.network.neutron [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1677.964204] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.964570] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1677.964804] env[63297]: DEBUG nova.objects.instance [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lazy-loading 'resources' on Instance uuid 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1678.062203] env[63297]: DEBUG nova.compute.manager [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Received event network-changed-d469619d-b568-437d-8023-8d02e02b7350 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1678.062463] env[63297]: DEBUG nova.compute.manager [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Refreshing instance network info cache due to event network-changed-d469619d-b568-437d-8023-8d02e02b7350. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1678.062681] env[63297]: DEBUG oslo_concurrency.lockutils [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] Acquiring lock "refresh_cache-3ab25962-2150-4331-a018-aa61bd082814" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.089378] env[63297]: DEBUG oslo_concurrency.lockutils [req-9084c02e-703e-4dd3-906e-8d282f025707 req-77c1c230-ae3e-4a70-adc5-525a22ec29a0 service nova] Releasing lock "refresh_cache-3ab25962-2150-4331-a018-aa61bd082814" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.089820] env[63297]: DEBUG oslo_concurrency.lockutils [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] Acquired lock "refresh_cache-3ab25962-2150-4331-a018-aa61bd082814" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.090102] env[63297]: DEBUG nova.network.neutron [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Refreshing network info cache for port d469619d-b568-437d-8023-8d02e02b7350 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1678.117223] env[63297]: DEBUG oslo_vmware.api [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698143, 'name': PowerOnVM_Task, 'duration_secs': 0.524457} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.117223] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1678.117223] env[63297]: DEBUG nova.compute.manager [None req-6456686f-59ae-463e-a7f0-4fc67bd2c28a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1678.117704] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2664152b-4481-4252-9d2d-8b616f9e3fa1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.330317] env[63297]: DEBUG nova.network.neutron [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1678.467655] env[63297]: DEBUG nova.objects.instance [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lazy-loading 'numa_topology' on Instance uuid 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1678.653233] env[63297]: DEBUG nova.network.neutron [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance_info_cache with network_info: [{"id": "dc0af285-6a18-4cb7-b669-1b9a78865789", "address": "fa:16:3e:f3:6b:12", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc0af285-6a", "ovs_interfaceid": "dc0af285-6a18-4cb7-b669-1b9a78865789", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.695418] env[63297]: INFO nova.compute.manager [None req-2e678c59-f494-4b6f-898a-7d1ff16c0f78 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Get console output [ 1678.695418] env[63297]: WARNING nova.virt.vmwareapi.driver [None req-2e678c59-f494-4b6f-898a-7d1ff16c0f78 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] The console log is missing. Check your VSPC configuration [ 1678.774764] env[63297]: DEBUG nova.compute.manager [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1678.806058] env[63297]: DEBUG nova.virt.hardware [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1678.806430] env[63297]: DEBUG nova.virt.hardware [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1678.806636] env[63297]: DEBUG nova.virt.hardware [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1678.806859] env[63297]: DEBUG nova.virt.hardware [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1678.807036] env[63297]: DEBUG nova.virt.hardware [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1678.807232] env[63297]: DEBUG nova.virt.hardware [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1678.807498] env[63297]: DEBUG nova.virt.hardware [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1678.808061] env[63297]: DEBUG nova.virt.hardware [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1678.808265] env[63297]: DEBUG nova.virt.hardware [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 
tempest-ServersTestJSON-1972465365-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1678.808438] env[63297]: DEBUG nova.virt.hardware [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1678.808642] env[63297]: DEBUG nova.virt.hardware [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1678.810200] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c886db-afc3-4e7a-8f11-6cb8a56d72de {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.826675] env[63297]: DEBUG nova.compute.manager [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Stashing vm_state: active {{(pid=63297) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1678.831685] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d61ce27-3488-482b-8bd6-5f9a364b935b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.973435] env[63297]: DEBUG nova.objects.base [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Object Instance<427c4ff0-1bf1-4bfb-b5c6-de6659148ab1> lazy-loaded attributes: resources,numa_topology {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1679.002310] env[63297]: DEBUG nova.network.neutron [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Updated VIF entry in instance network info cache for port d469619d-b568-437d-8023-8d02e02b7350. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1679.003182] env[63297]: DEBUG nova.network.neutron [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Updating instance_info_cache with network_info: [{"id": "d469619d-b568-437d-8023-8d02e02b7350", "address": "fa:16:3e:4a:1e:b1", "network": {"id": "b4d7c5a5-3ce8-4260-adec-a4bfc26133b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1616270477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d636a91a492a4f538bc2fc8634f5fa14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd469619d-b5", "ovs_interfaceid": "d469619d-b568-437d-8023-8d02e02b7350", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1679.155819] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.156153] env[63297]: DEBUG nova.compute.manager [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Instance network_info: |[{"id": "dc0af285-6a18-4cb7-b669-1b9a78865789", "address": "fa:16:3e:f3:6b:12", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc0af285-6a", "ovs_interfaceid": "dc0af285-6a18-4cb7-b669-1b9a78865789", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1679.156559] env[63297]: 
DEBUG nova.virt.vmwareapi.vmops [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:6b:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '22390021-1742-415d-b442-811550d09927', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc0af285-6a18-4cb7-b669-1b9a78865789', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1679.165312] env[63297]: DEBUG oslo.service.loopingcall [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1679.167896] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1679.168918] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52a589db-159d-4e29-b326-44b9e62168a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.198964] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1679.198964] env[63297]: value = "task-1698144" [ 1679.198964] env[63297]: _type = "Task" [ 1679.198964] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.205642] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267fc5a9-f32a-4c1c-87f9-8da7e01e4c62 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.219459] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698144, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.225701] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42799a89-3c89-422d-8ae0-387bae6a68ed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.257721] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c8b976-3895-46b6-acfe-4c7e5411e8d1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.265496] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df9542f-7976-4dbe-baeb-52390b8a157f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.280013] env[63297]: DEBUG nova.compute.provider_tree [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1679.299918] env[63297]: DEBUG nova.network.neutron [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Successfully updated port: 465bbc4d-f40c-47fa-bc69-986670184c65 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1679.349813] env[63297]: DEBUG oslo_concurrency.lockutils [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.449028] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "interface-66b7a1e5-5e74-49db-99f3-4427d7297bf2-bbed31c2-2385-4195-90af-10f04ce61f2f" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.449284] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-66b7a1e5-5e74-49db-99f3-4427d7297bf2-bbed31c2-2385-4195-90af-10f04ce61f2f" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.449647] env[63297]: DEBUG nova.objects.instance [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'flavor' on Instance uuid 66b7a1e5-5e74-49db-99f3-4427d7297bf2 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1679.461311] env[63297]: DEBUG nova.compute.manager 
[req-a6bdb636-4b72-4455-9710-c92f83b30329 req-f6c667bd-129f-44b7-9ed9-347ce579eb1b service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Received event network-changed-bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1679.461510] env[63297]: DEBUG nova.compute.manager [req-a6bdb636-4b72-4455-9710-c92f83b30329 req-f6c667bd-129f-44b7-9ed9-347ce579eb1b service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Refreshing instance network info cache due to event network-changed-bb862c99-f006-416a-9b98-0fb287a5d194. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1679.461785] env[63297]: DEBUG oslo_concurrency.lockutils [req-a6bdb636-4b72-4455-9710-c92f83b30329 req-f6c667bd-129f-44b7-9ed9-347ce579eb1b service nova] Acquiring lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.461849] env[63297]: DEBUG oslo_concurrency.lockutils [req-a6bdb636-4b72-4455-9710-c92f83b30329 req-f6c667bd-129f-44b7-9ed9-347ce579eb1b service nova] Acquired lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.462061] env[63297]: DEBUG nova.network.neutron [req-a6bdb636-4b72-4455-9710-c92f83b30329 req-f6c667bd-129f-44b7-9ed9-347ce579eb1b service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Refreshing network info cache for port bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1679.506841] env[63297]: DEBUG oslo_concurrency.lockutils [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] Releasing lock "refresh_cache-3ab25962-2150-4331-a018-aa61bd082814" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.506841] env[63297]: DEBUG nova.compute.manager [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Received event network-vif-plugged-dc0af285-6a18-4cb7-b669-1b9a78865789 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1679.506841] env[63297]: DEBUG oslo_concurrency.lockutils [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] Acquiring lock "ffaa1402-5b51-4393-82c7-d9db964edfd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.506841] env[63297]: DEBUG oslo_concurrency.lockutils [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] Lock "ffaa1402-5b51-4393-82c7-d9db964edfd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.507271] env[63297]: DEBUG oslo_concurrency.lockutils [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] Lock "ffaa1402-5b51-4393-82c7-d9db964edfd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.507271] env[63297]: DEBUG nova.compute.manager [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] No waiting events found dispatching network-vif-plugged-dc0af285-6a18-4cb7-b669-1b9a78865789 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1679.507271] env[63297]: WARNING nova.compute.manager [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Received unexpected event network-vif-plugged-dc0af285-6a18-4cb7-b669-1b9a78865789 for instance with vm_state building and task_state spawning. [ 1679.507441] env[63297]: DEBUG nova.compute.manager [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Received event network-changed-dc0af285-6a18-4cb7-b669-1b9a78865789 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1679.507596] env[63297]: DEBUG nova.compute.manager [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Refreshing instance network info cache due to event network-changed-dc0af285-6a18-4cb7-b669-1b9a78865789. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1679.507879] env[63297]: DEBUG oslo_concurrency.lockutils [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] Acquiring lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.507952] env[63297]: DEBUG oslo_concurrency.lockutils [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] Acquired lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.508100] env[63297]: DEBUG nova.network.neutron [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Refreshing network info cache for port dc0af285-6a18-4cb7-b669-1b9a78865789 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1679.692037] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.712297] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698144, 'name': CreateVM_Task} progress is 25%. 
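Editor's note: the CreateVM_Task entries above show the driver polling a vCenter task through oslo.vmware, with progress going 0% -> 25% -> completed. As a rough, hedged illustration only (this is not the oslo.vmware implementation; get_task_info and POLL_INTERVAL are hypothetical placeholders), a caller-side polling loop looks roughly like this:

import time

POLL_INTERVAL = 0.5  # seconds between polls; illustrative value only

def wait_for_vcenter_task(get_task_info, task_id, timeout=300):
    """Poll a vCenter task until it succeeds, fails, or times out.

    get_task_info is a hypothetical callable returning a dict like
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        # Comparable to the "progress is 0%/25%" DEBUG lines above.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(POLL_INTERVAL)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")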
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.783654] env[63297]: DEBUG nova.scheduler.client.report [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1679.803971] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "refresh_cache-314c2cd3-6f1d-4d74-ad84-d7cc44375456" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.803971] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "refresh_cache-314c2cd3-6f1d-4d74-ad84-d7cc44375456" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.803971] env[63297]: DEBUG nova.network.neutron [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1680.090459] env[63297]: DEBUG nova.objects.instance [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'pci_requests' on Instance uuid 66b7a1e5-5e74-49db-99f3-4427d7297bf2 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1680.196196] env[63297]: DEBUG oslo_concurrency.lockutils [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "96265295-6b0c-4803-bb89-6166c9d3fc7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.196468] env[63297]: DEBUG oslo_concurrency.lockutils [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "96265295-6b0c-4803-bb89-6166c9d3fc7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.196676] env[63297]: DEBUG oslo_concurrency.lockutils [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "96265295-6b0c-4803-bb89-6166c9d3fc7f-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.197594] env[63297]: DEBUG oslo_concurrency.lockutils [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "96265295-6b0c-4803-bb89-6166c9d3fc7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.197787] env[63297]: DEBUG oslo_concurrency.lockutils [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "96265295-6b0c-4803-bb89-6166c9d3fc7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.204265] env[63297]: INFO nova.compute.manager [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Terminating instance [ 1680.206438] env[63297]: DEBUG nova.compute.manager [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1680.206633] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1680.207802] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2fe679-6ae5-482a-ad69-19247d8c79c8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.215402] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698144, 'name': CreateVM_Task, 'duration_secs': 0.71924} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.218705] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1680.219363] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.219531] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.220171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1680.222571] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77ce9c98-823b-4fb6-bc4b-094f8ac75073 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.229204] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1680.229204] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f4501e2-904e-4428-8d6a-2df5dfcf53f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.229944] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1680.229944] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c71cc2-481d-b103-c974-4f951494121f" [ 1680.229944] env[63297]: _type = "Task" [ 1680.229944] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.234862] env[63297]: DEBUG oslo_vmware.api [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1680.234862] env[63297]: value = "task-1698145" [ 1680.234862] env[63297]: _type = "Task" [ 1680.234862] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.241052] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c71cc2-481d-b103-c974-4f951494121f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.247746] env[63297]: DEBUG oslo_vmware.api [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698145, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.293524] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.329s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.296153] env[63297]: DEBUG oslo_concurrency.lockutils [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.946s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.340247] env[63297]: DEBUG nova.network.neutron [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updated VIF entry in instance network info cache for port dc0af285-6a18-4cb7-b669-1b9a78865789. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1680.342485] env[63297]: DEBUG nova.network.neutron [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance_info_cache with network_info: [{"id": "dc0af285-6a18-4cb7-b669-1b9a78865789", "address": "fa:16:3e:f3:6b:12", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc0af285-6a", "ovs_interfaceid": "dc0af285-6a18-4cb7-b669-1b9a78865789", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.342743] env[63297]: DEBUG nova.network.neutron [req-a6bdb636-4b72-4455-9710-c92f83b30329 req-f6c667bd-129f-44b7-9ed9-347ce579eb1b service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updated VIF entry in instance network info cache for port bb862c99-f006-416a-9b98-0fb287a5d194. 
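Editor's note: the instance_info_cache payloads above are JSON-like lists of VIF dicts. A small standalone sketch (plain Python, not Nova's NetworkInfo model) that pulls port IDs and addresses out of such a structure:

def addresses_from_network_info(network_info):
    """Collect (port_id, fixed_ip, [floating_ips]) tuples from a
    network_info list shaped like the cache entries logged above."""
    result = []
    for vif in network_info:
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                floats = [f["address"] for f in ip.get("floating_ips", [])]
                result.append((vif["id"], ip["address"], floats))
    return result

# Applied to the cache entry for port dc0af285-... shown above, this yields
# [("dc0af285-6a18-4cb7-b669-1b9a78865789", "192.168.128.4", [])].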
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1680.343044] env[63297]: DEBUG nova.network.neutron [req-a6bdb636-4b72-4455-9710-c92f83b30329 req-f6c667bd-129f-44b7-9ed9-347ce579eb1b service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updating instance_info_cache with network_info: [{"id": "bb862c99-f006-416a-9b98-0fb287a5d194", "address": "fa:16:3e:03:4f:8d", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapbb862c99-f0", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.348179] env[63297]: DEBUG nova.network.neutron [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1680.554441] env[63297]: DEBUG nova.network.neutron [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Updating instance_info_cache with network_info: [{"id": "465bbc4d-f40c-47fa-bc69-986670184c65", "address": "fa:16:3e:ec:50:b6", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap465bbc4d-f4", "ovs_interfaceid": "465bbc4d-f40c-47fa-bc69-986670184c65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.593141] env[63297]: DEBUG nova.objects.base [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 
tempest-AttachInterfacesTestJSON-760324721-project-member] Object Instance<66b7a1e5-5e74-49db-99f3-4427d7297bf2> lazy-loaded attributes: flavor,pci_requests {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1680.593397] env[63297]: DEBUG nova.network.neutron [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1680.604481] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "b1ed5d76-d358-49d3-a854-8f968bc987ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.604779] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.604993] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "b1ed5d76-d358-49d3-a854-8f968bc987ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.605198] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.605370] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.610591] env[63297]: INFO nova.compute.manager [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Terminating instance [ 1680.617147] env[63297]: DEBUG nova.compute.manager [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1680.617361] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1680.618265] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabe0764-e862-48ff-8f0f-c7e393c7d72d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.629764] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1680.630151] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c23702ac-8890-4130-84b3-eced8d3dd70a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.638520] env[63297]: DEBUG oslo_vmware.api [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1680.638520] env[63297]: value = "task-1698146" [ 1680.638520] env[63297]: _type = "Task" [ 1680.638520] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.648614] env[63297]: DEBUG oslo_vmware.api [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698146, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.669552] env[63297]: DEBUG nova.policy [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c21dc2049dd84f87a3b7cbcd7ba0ebcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48bfb708de5c4dd287530be2f8483ca9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1680.741907] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c71cc2-481d-b103-c974-4f951494121f, 'name': SearchDatastore_Task} progress is 0%. 
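Editor's note: the repeated "Acquiring lock ... / Lock ... acquired ... / Lock ... released" lines come from oslo.concurrency's lockutils helpers. A minimal sketch of the same pattern, assuming only the public lockutils.lock / lockutils.synchronized helpers (the function bodies and lock names below are made up for illustration):

from oslo_concurrency import lockutils

# Context-manager form: mirrors the acquire/release pairs in the log.
def refresh_cache(instance_uuid):
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        pass  # refresh the network info cache while holding the lock

# Decorator form: every call is serialized on the named lock.
@lockutils.synchronized("compute_resources")
def resize_claim():
    pass  # claim resources while holding the compute_resources lock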
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.747146] env[63297]: DEBUG oslo_vmware.api [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698145, 'name': PowerOffVM_Task, 'duration_secs': 0.460283} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.747444] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1680.747615] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1680.747877] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57e1a181-3d37-4c59-a48b-a3fcad976b70 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.806458] env[63297]: INFO nova.compute.claims [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1680.820551] env[63297]: DEBUG oslo_concurrency.lockutils [None req-52be517f-8356-40dd-a532-c05276a8e6fc tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.975s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.821630] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.130s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.821822] env[63297]: INFO nova.compute.manager [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Unshelving [ 1680.843684] env[63297]: DEBUG oslo_concurrency.lockutils [req-ef07357c-f5fc-4108-b906-2edf20446c23 req-88154f4f-b853-473e-9c8c-c54faf5abece service nova] Releasing lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.845292] env[63297]: DEBUG oslo_concurrency.lockutils [req-a6bdb636-4b72-4455-9710-c92f83b30329 req-f6c667bd-129f-44b7-9ed9-347ce579eb1b service nova] Releasing lock 
"refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.970075] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1680.970375] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1680.970568] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Deleting the datastore file [datastore1] 96265295-6b0c-4803-bb89-6166c9d3fc7f {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1680.970878] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c139040-be83-43d7-9232-d0161552215a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.977911] env[63297]: DEBUG oslo_vmware.api [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1680.977911] env[63297]: value = "task-1698148" [ 1680.977911] env[63297]: _type = "Task" [ 1680.977911] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.985921] env[63297]: DEBUG oslo_vmware.api [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698148, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.056908] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "refresh_cache-314c2cd3-6f1d-4d74-ad84-d7cc44375456" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.057227] env[63297]: DEBUG nova.compute.manager [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Instance network_info: |[{"id": "465bbc4d-f40c-47fa-bc69-986670184c65", "address": "fa:16:3e:ec:50:b6", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap465bbc4d-f4", "ovs_interfaceid": "465bbc4d-f40c-47fa-bc69-986670184c65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1681.057819] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:50:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '465bbc4d-f40c-47fa-bc69-986670184c65', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1681.066041] env[63297]: DEBUG oslo.service.loopingcall [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
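Editor's note: the "Instance VIF info" entries above show how the Neutron port data from network_info is turned into the dict handed to vm_util: the port's nsx-logical-switch-id becomes an OpaqueNetwork reference and the adapter model is vmxnet3. A hedged standalone sketch of that mapping (not the driver's own vif code):

def vif_info_from_network_info_entry(vif, integration_bridge="br-int"):
    """Build a VMware-style VIF-info dict from one network_info entry,
    matching the shape of the 'Instance VIF info' log lines above."""
    details = vif.get("details", {})
    return {
        "network_name": integration_bridge,
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }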
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1681.066275] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1681.066504] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f016df44-1284-4356-a6ac-e67a45869ca6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.090021] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1681.090021] env[63297]: value = "task-1698149" [ 1681.090021] env[63297]: _type = "Task" [ 1681.090021] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.096100] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698149, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.147585] env[63297]: DEBUG oslo_vmware.api [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698146, 'name': PowerOffVM_Task, 'duration_secs': 0.206605} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.147856] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1681.148041] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1681.148315] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a2346235-594c-4fcf-8dda-f2cec0fcf05c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.241695] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c71cc2-481d-b103-c974-4f951494121f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.249543] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1681.249771] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1681.249947] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleting the datastore file [datastore1] b1ed5d76-d358-49d3-a854-8f968bc987ad {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1681.250336] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2139f980-ff0c-4040-ba83-b260201970cb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.256894] env[63297]: DEBUG oslo_vmware.api [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1681.256894] env[63297]: value = "task-1698151" [ 1681.256894] env[63297]: _type = "Task" [ 1681.256894] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.264926] env[63297]: DEBUG oslo_vmware.api [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698151, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.319175] env[63297]: INFO nova.compute.resource_tracker [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating resource usage from migration 51930a5f-a62a-41e1-9334-d43d3d7caf8f [ 1681.488849] env[63297]: DEBUG oslo_vmware.api [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698148, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.491276] env[63297]: DEBUG nova.compute.manager [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Received event network-vif-plugged-465bbc4d-f40c-47fa-bc69-986670184c65 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1681.491276] env[63297]: DEBUG oslo_concurrency.lockutils [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] Acquiring lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.491474] env[63297]: DEBUG oslo_concurrency.lockutils [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] Lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.491612] env[63297]: DEBUG oslo_concurrency.lockutils [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] Lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.491737] env[63297]: DEBUG nova.compute.manager [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] No waiting events found dispatching network-vif-plugged-465bbc4d-f40c-47fa-bc69-986670184c65 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1681.491922] env[63297]: WARNING nova.compute.manager [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Received unexpected event network-vif-plugged-465bbc4d-f40c-47fa-bc69-986670184c65 for instance with vm_state building and task_state spawning. [ 1681.492059] env[63297]: DEBUG nova.compute.manager [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Received event network-changed-465bbc4d-f40c-47fa-bc69-986670184c65 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1681.492258] env[63297]: DEBUG nova.compute.manager [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Refreshing instance network info cache due to event network-changed-465bbc4d-f40c-47fa-bc69-986670184c65. 
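Editor's note: the WARNING "Received unexpected event network-vif-plugged-... for instance with vm_state building" above is what happens when Neutron's event arrives before the compute manager has registered a waiter for it. Conceptually the registry is a per-instance map from event names to waitable objects; a simplified, hedged sketch (threading-based, not Nova's actual InstanceEvents implementation):

import threading
from collections import defaultdict

class InstanceEvents:
    """Simplified waiter registry for external instance events."""

    def __init__(self):
        self._events = defaultdict(dict)  # instance_uuid -> {event_name: Event}
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, event_name):
        with self._lock:
            ev = threading.Event()
            self._events[instance_uuid][event_name] = ev
            return ev

    def dispatch(self, instance_uuid, event_name):
        with self._lock:
            ev = self._events[instance_uuid].pop(event_name, None)
        if ev is None:
            # Comparable to "No waiting events found dispatching ..." /
            # "Received unexpected event ..." in the log above.
            return False
        ev.set()
        return True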
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1681.492437] env[63297]: DEBUG oslo_concurrency.lockutils [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] Acquiring lock "refresh_cache-314c2cd3-6f1d-4d74-ad84-d7cc44375456" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1681.492542] env[63297]: DEBUG oslo_concurrency.lockutils [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] Acquired lock "refresh_cache-314c2cd3-6f1d-4d74-ad84-d7cc44375456" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1681.492693] env[63297]: DEBUG nova.network.neutron [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Refreshing network info cache for port 465bbc4d-f40c-47fa-bc69-986670184c65 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1681.540342] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83826ae-e977-4cb5-89ed-90c227224024 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.548065] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ffa5f2-470c-4835-b860-b0f06228c8fd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.581596] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29466f68-0e38-4c22-9982-05e39a4b04ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.588999] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4800d699-cdbc-44a6-8f32-7aae74a5238c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.605475] env[63297]: DEBUG nova.compute.provider_tree [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1681.609470] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698149, 'name': CreateVM_Task, 'duration_secs': 0.365442} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.609834] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1681.610466] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1681.633255] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.633477] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.741615] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c71cc2-481d-b103-c974-4f951494121f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.766409] env[63297]: DEBUG oslo_vmware.api [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698151, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.835336] env[63297]: DEBUG nova.compute.utils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1681.992241] env[63297]: DEBUG oslo_vmware.api [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698148, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.112296] env[63297]: DEBUG nova.scheduler.client.report [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1682.136237] env[63297]: DEBUG nova.compute.manager [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1682.219464] env[63297]: DEBUG nova.network.neutron [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Updated VIF entry in instance network info cache for port 465bbc4d-f40c-47fa-bc69-986670184c65. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1682.219773] env[63297]: DEBUG nova.network.neutron [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Updating instance_info_cache with network_info: [{"id": "465bbc4d-f40c-47fa-bc69-986670184c65", "address": "fa:16:3e:ec:50:b6", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap465bbc4d-f4", "ovs_interfaceid": "465bbc4d-f40c-47fa-bc69-986670184c65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1682.246488] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c71cc2-481d-b103-c974-4f951494121f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.266946] env[63297]: DEBUG oslo_vmware.api [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698151, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.339590] env[63297]: INFO nova.virt.block_device [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Booting with volume 4890cca6-2688-4f8a-89b5-29f10002cfe8 at /dev/sdb [ 1682.383426] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80239ad6-fde2-4b85-a6bb-40104e6648c0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.392653] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0362c17d-6f97-4eab-ad52-fc114dc2ad44 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.427937] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9581bfe7-af90-462a-8a5f-3b21e084b2db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.436791] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85373cfa-ee0f-4c0f-bd4f-880faba204c5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.471444] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e16cfb-e38b-43a6-9493-039737f68694 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.479574] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda948ac-5932-4a16-9d71-2fcba71baf67 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.493511] env[63297]: DEBUG oslo_vmware.api [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698148, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.498027] env[63297]: DEBUG nova.virt.block_device [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updating existing volume attachment record: 488b7a58-aceb-4693-a62e-0b9229c954f9 {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1682.591591] env[63297]: DEBUG nova.network.neutron [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Successfully updated port: bbed31c2-2385-4195-90af-10f04ce61f2f {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1682.618481] env[63297]: DEBUG oslo_concurrency.lockutils [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.322s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.618917] env[63297]: INFO nova.compute.manager [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Migrating [ 1682.657811] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.658085] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.659634] env[63297]: INFO nova.compute.claims [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1682.722213] env[63297]: DEBUG oslo_concurrency.lockutils [req-6437734f-9a27-48d3-9cc1-240337548f73 req-69a9ff4d-4530-4736-bac7-781b63550b0d service nova] Releasing lock "refresh_cache-314c2cd3-6f1d-4d74-ad84-d7cc44375456" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1682.742931] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c71cc2-481d-b103-c974-4f951494121f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.769845] env[63297]: DEBUG oslo_vmware.api [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698151, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.992029] env[63297]: DEBUG oslo_vmware.api [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.095083] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1683.095272] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1683.095460] env[63297]: DEBUG nova.network.neutron [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1683.134640] env[63297]: DEBUG oslo_concurrency.lockutils [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1683.135267] env[63297]: DEBUG oslo_concurrency.lockutils [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1683.135468] env[63297]: DEBUG nova.network.neutron [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1683.243126] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c71cc2-481d-b103-c974-4f951494121f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.269137] env[63297]: DEBUG oslo_vmware.api [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698151, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.493110] env[63297]: DEBUG oslo_vmware.api [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.625069] env[63297]: DEBUG nova.compute.manager [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received event network-vif-plugged-bbed31c2-2385-4195-90af-10f04ce61f2f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1683.625438] env[63297]: DEBUG oslo_concurrency.lockutils [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] Acquiring lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.625507] env[63297]: DEBUG oslo_concurrency.lockutils [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] Lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.625664] env[63297]: DEBUG oslo_concurrency.lockutils [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] Lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.625829] env[63297]: DEBUG nova.compute.manager [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] No waiting events found dispatching network-vif-plugged-bbed31c2-2385-4195-90af-10f04ce61f2f {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1683.625993] env[63297]: WARNING nova.compute.manager [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received unexpected event network-vif-plugged-bbed31c2-2385-4195-90af-10f04ce61f2f for instance with vm_state active and task_state None. 
[ 1683.626214] env[63297]: DEBUG nova.compute.manager [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received event network-changed-bbed31c2-2385-4195-90af-10f04ce61f2f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1683.626301] env[63297]: DEBUG nova.compute.manager [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Refreshing instance network info cache due to event network-changed-bbed31c2-2385-4195-90af-10f04ce61f2f. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1683.626528] env[63297]: DEBUG oslo_concurrency.lockutils [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] Acquiring lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1683.638530] env[63297]: WARNING nova.network.neutron [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] 250ab20f-3057-41ed-bb65-926464a4e926 already exists in list: networks containing: ['250ab20f-3057-41ed-bb65-926464a4e926']. ignoring it [ 1683.638725] env[63297]: WARNING nova.network.neutron [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] 250ab20f-3057-41ed-bb65-926464a4e926 already exists in list: networks containing: ['250ab20f-3057-41ed-bb65-926464a4e926']. ignoring it [ 1683.745257] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c71cc2-481d-b103-c974-4f951494121f, 'name': SearchDatastore_Task, 'duration_secs': 3.221512} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.745595] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1683.745826] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1683.746066] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1683.746216] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1683.746524] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1683.746939] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1683.747265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1683.747571] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-025cee8a-2f8f-47e6-aa1b-f991bd7ae0b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.759112] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-deb8929f-0103-4663-a5bd-5b8822b80fa6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.768599] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce 
tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1683.768599] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b65cb6-f2ba-4b8e-69a7-230d4af8ba29" [ 1683.768599] env[63297]: _type = "Task" [ 1683.768599] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.772570] env[63297]: DEBUG oslo_vmware.api [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698151, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.363681} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.772792] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1683.772958] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1683.776525] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1683.776903] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1683.776903] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1683.777072] env[63297]: INFO nova.compute.manager [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Took 3.16 seconds to destroy the instance on the hypervisor. [ 1683.777311] env[63297]: DEBUG oslo.service.loopingcall [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.777499] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-632c19d9-a9f9-4522-9fa0-75ca81e515fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.784036] env[63297]: DEBUG nova.compute.manager [-] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1683.784151] env[63297]: DEBUG nova.network.neutron [-] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1683.789376] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1683.789376] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52edd4f8-a1a0-5097-1f10-a3e1b4b6c967" [ 1683.789376] env[63297]: _type = "Task" [ 1683.789376] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.793498] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b65cb6-f2ba-4b8e-69a7-230d4af8ba29, 'name': SearchDatastore_Task, 'duration_secs': 0.00952} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.799259] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1683.799499] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1683.799704] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1683.808074] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52edd4f8-a1a0-5097-1f10-a3e1b4b6c967, 'name': SearchDatastore_Task, 'duration_secs': 0.00897} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.808898] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6aab1d4e-2a0c-453f-b1e1-43132ccf85d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.814894] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1683.814894] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d8593-2f19-e9de-76c3-62473ee396db" [ 1683.814894] env[63297]: _type = "Task" [ 1683.814894] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.826485] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528d8593-2f19-e9de-76c3-62473ee396db, 'name': SearchDatastore_Task, 'duration_secs': 0.00948} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.826764] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1683.827043] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] ffaa1402-5b51-4393-82c7-d9db964edfd3/ffaa1402-5b51-4393-82c7-d9db964edfd3.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1683.827333] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1683.827523] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1683.827738] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0633a07c-5e9a-4076-88bd-71629e843f51 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.829700] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56f8b004-440f-4366-8da3-7810795edb4a {{(pid=63297) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.838552] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1683.838552] env[63297]: value = "task-1698155" [ 1683.838552] env[63297]: _type = "Task" [ 1683.838552] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.842714] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1683.842922] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1683.846297] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73f56ee8-1586-4290-ab77-57719a3864ed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.854957] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698155, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.858242] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1683.858242] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52688ebe-a595-7579-d292-cef4aec91ed6" [ 1683.858242] env[63297]: _type = "Task" [ 1683.858242] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.866294] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52688ebe-a595-7579-d292-cef4aec91ed6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.904443] env[63297]: DEBUG nova.network.neutron [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance_info_cache with network_info: [{"id": "51feb81a-d695-4671-800d-b58470af4ae2", "address": "fa:16:3e:55:69:f5", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51feb81a-d6", "ovs_interfaceid": "51feb81a-d695-4671-800d-b58470af4ae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.965015] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654d6082-a8d2-4126-9fbf-abc3da799eea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.973236] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab522f35-6986-4d97-9d6c-b1028987a3ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.012093] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49867854-ce28-4829-9c58-350c6ec0f70c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.020636] env[63297]: DEBUG oslo_vmware.api [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.636398} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.022942] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1684.023185] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1684.023561] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1684.023625] env[63297]: INFO nova.compute.manager [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Took 3.82 seconds to destroy the instance on the hypervisor. [ 1684.023852] env[63297]: DEBUG oslo.service.loopingcall [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1684.024144] env[63297]: DEBUG nova.compute.manager [-] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1684.024244] env[63297]: DEBUG nova.network.neutron [-] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1684.026959] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6eb969-8fe6-4803-82a6-f8017eeeabbc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.042058] env[63297]: DEBUG nova.compute.provider_tree [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1684.349167] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698155, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.355467] env[63297]: DEBUG nova.network.neutron [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updating instance_info_cache with network_info: [{"id": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "address": "fa:16:3e:08:14:f1", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95dd8f-a3", "ovs_interfaceid": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a5af9937-1640-481b-b998-9090c09fa6e0", "address": "fa:16:3e:f1:dd:17", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5af9937-16", "ovs_interfaceid": "a5af9937-1640-481b-b998-9090c09fa6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bbed31c2-2385-4195-90af-10f04ce61f2f", "address": "fa:16:3e:8e:65:9b", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbed31c2-23", "ovs_interfaceid": "bbed31c2-2385-4195-90af-10f04ce61f2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.370564] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52688ebe-a595-7579-d292-cef4aec91ed6, 'name': SearchDatastore_Task, 'duration_secs': 0.008254} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.372063] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c976ae5-9e48-4058-a4ab-288ecf4b8f08 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.379182] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1684.379182] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52541502-871d-6224-11ee-9e10519078ee" [ 1684.379182] env[63297]: _type = "Task" [ 1684.379182] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.387737] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52541502-871d-6224-11ee-9e10519078ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.408112] env[63297]: DEBUG oslo_concurrency.lockutils [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.545032] env[63297]: DEBUG nova.scheduler.client.report [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1684.555033] env[63297]: DEBUG nova.compute.manager [req-f4f84f05-ef3b-4b56-ac35-06a853ecfb91 req-9ed83b94-9b58-4bd4-be4c-e04a8923ed7d service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Received event network-vif-deleted-c8c6ab49-f882-4349-bddd-cfb1a972afc0 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1684.555033] env[63297]: INFO nova.compute.manager [req-f4f84f05-ef3b-4b56-ac35-06a853ecfb91 req-9ed83b94-9b58-4bd4-be4c-e04a8923ed7d service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Neutron deleted interface c8c6ab49-f882-4349-bddd-cfb1a972afc0; detaching it from the instance and deleting it from the info cache [ 1684.555033] env[63297]: DEBUG nova.network.neutron [req-f4f84f05-ef3b-4b56-ac35-06a853ecfb91 req-9ed83b94-9b58-4bd4-be4c-e04a8923ed7d service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.849311] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698155, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642384} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.849557] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] ffaa1402-5b51-4393-82c7-d9db964edfd3/ffaa1402-5b51-4393-82c7-d9db964edfd3.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1684.849557] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1684.849790] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-001038b6-2f04-49f4-a9f0-c49a289e8abc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.856218] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1684.856218] env[63297]: value = "task-1698157" [ 1684.856218] env[63297]: _type = "Task" [ 1684.856218] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.859761] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.860695] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.860891] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.861232] env[63297]: DEBUG oslo_concurrency.lockutils [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] Acquired lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.861445] env[63297]: DEBUG nova.network.neutron [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Refreshing network info cache for port bbed31c2-2385-4195-90af-10f04ce61f2f {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 
1684.863524] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d94a677-e183-4d51-a32e-8b978921d2ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.870975] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698157, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.884300] env[63297]: DEBUG nova.virt.hardware [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1684.884535] env[63297]: DEBUG nova.virt.hardware [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1684.884689] env[63297]: DEBUG nova.virt.hardware [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1684.884864] env[63297]: DEBUG nova.virt.hardware [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1684.885015] env[63297]: DEBUG nova.virt.hardware [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1684.885163] env[63297]: DEBUG nova.virt.hardware [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1684.885361] env[63297]: DEBUG nova.virt.hardware [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1684.885520] 
env[63297]: DEBUG nova.virt.hardware [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1684.885681] env[63297]: DEBUG nova.virt.hardware [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1684.885840] env[63297]: DEBUG nova.virt.hardware [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1684.886014] env[63297]: DEBUG nova.virt.hardware [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1684.892452] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Reconfiguring VM to attach interface {{(pid=63297) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1684.896271] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63f4ef09-78e5-4cb5-9ebc-c274e31b6d02 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.918128] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52541502-871d-6224-11ee-9e10519078ee, 'name': SearchDatastore_Task, 'duration_secs': 0.062398} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.919435] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.919635] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 314c2cd3-6f1d-4d74-ad84-d7cc44375456/314c2cd3-6f1d-4d74-ad84-d7cc44375456.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1684.920814] env[63297]: DEBUG oslo_vmware.api [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1684.920814] env[63297]: value = "task-1698158" [ 1684.920814] env[63297]: _type = "Task" [ 1684.920814] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.920814] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98bc388b-af2c-42fa-9488-6e035ab9eb47 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.935221] env[63297]: DEBUG oslo_vmware.api [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698158, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.935221] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1684.935221] env[63297]: value = "task-1698159" [ 1684.935221] env[63297]: _type = "Task" [ 1684.935221] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.942760] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698159, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.031973] env[63297]: DEBUG nova.network.neutron [-] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.054574] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.054574] env[63297]: DEBUG nova.compute.manager [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1685.057642] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7e39d76-33bb-44c2-9602-70eb6ff03861 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.067506] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7952c433-4d86-4137-8b7e-8d764885970d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.100956] env[63297]: DEBUG nova.compute.manager [req-f4f84f05-ef3b-4b56-ac35-06a853ecfb91 req-9ed83b94-9b58-4bd4-be4c-e04a8923ed7d service nova] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Detach interface failed, port_id=c8c6ab49-f882-4349-bddd-cfb1a972afc0, reason: Instance b1ed5d76-d358-49d3-a854-8f968bc987ad could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1685.174042] env[63297]: DEBUG nova.network.neutron [-] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.366597] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698157, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.218117} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.366890] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1685.369844] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b27b73f-b270-4a6c-a7ac-9d68285e60f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.397782] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] ffaa1402-5b51-4393-82c7-d9db964edfd3/ffaa1402-5b51-4393-82c7-d9db964edfd3.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1685.398130] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e76bdaa4-0ee7-45e1-adae-117f99639414 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.424596] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1685.424596] env[63297]: value = "task-1698160" [ 1685.424596] env[63297]: _type = "Task" [ 1685.424596] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.438722] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698160, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.439500] env[63297]: DEBUG oslo_vmware.api [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698158, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.448030] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698159, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.532633] env[63297]: INFO nova.compute.manager [-] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Took 1.75 seconds to deallocate network for instance. 
[ 1685.557900] env[63297]: DEBUG nova.compute.utils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1685.559559] env[63297]: DEBUG nova.compute.manager [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1685.559748] env[63297]: DEBUG nova.network.neutron [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1685.636130] env[63297]: DEBUG nova.policy [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c1a03cca2224878ae9ccbb9e42a1b2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd636a91a492a4f538bc2fc8634f5fa14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1685.661390] env[63297]: DEBUG nova.network.neutron [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updated VIF entry in instance network info cache for port bbed31c2-2385-4195-90af-10f04ce61f2f. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1685.662419] env[63297]: DEBUG nova.network.neutron [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updating instance_info_cache with network_info: [{"id": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "address": "fa:16:3e:08:14:f1", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95dd8f-a3", "ovs_interfaceid": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a5af9937-1640-481b-b998-9090c09fa6e0", "address": "fa:16:3e:f1:dd:17", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5af9937-16", "ovs_interfaceid": "a5af9937-1640-481b-b998-9090c09fa6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bbed31c2-2385-4195-90af-10f04ce61f2f", "address": "fa:16:3e:8e:65:9b", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbed31c2-23", "ovs_interfaceid": "bbed31c2-2385-4195-90af-10f04ce61f2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.677268] env[63297]: INFO nova.compute.manager [-] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Took 1.65 seconds to deallocate network for instance. [ 1685.935089] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eab0f92-4e87-4909-8c6e-a102ba3f5bad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.938444] env[63297]: DEBUG nova.network.neutron [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Successfully created port: 54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1685.964610] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698160, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.965054] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance 'd7dc0672-c908-418e-bfcb-8daa761fba37' progress to 0 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1685.969321] env[63297]: DEBUG oslo_vmware.api [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698158, 'name': ReconfigVM_Task, 'duration_secs': 0.789874} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.970585] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.970824] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Reconfigured VM to attach interface {{(pid=63297) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1685.977138] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698159, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.966659} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.977674] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 314c2cd3-6f1d-4d74-ad84-d7cc44375456/314c2cd3-6f1d-4d74-ad84-d7cc44375456.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1685.977965] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1685.978293] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1060f488-54a4-4c30-9a64-aabf83130a70 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.986247] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1685.986247] env[63297]: value = "task-1698161" [ 1685.986247] env[63297]: _type = "Task" [ 1685.986247] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.995995] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698161, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.043522] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.043522] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.043768] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.062931] env[63297]: DEBUG nova.compute.manager [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1686.075891] env[63297]: INFO nova.scheduler.client.report [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleted allocations for instance b1ed5d76-d358-49d3-a854-8f968bc987ad [ 1686.165489] env[63297]: DEBUG oslo_concurrency.lockutils [req-cb5c9a01-2c6e-404b-8798-78b908c38132 req-7d651d77-33c4-4179-8eb6-b7f701917439 service nova] Releasing lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.184399] env[63297]: DEBUG oslo_concurrency.lockutils [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.185260] env[63297]: DEBUG oslo_concurrency.lockutils [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.185260] env[63297]: DEBUG nova.objects.instance [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lazy-loading 'resources' on Instance uuid 96265295-6b0c-4803-bb89-6166c9d3fc7f {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 
1686.436731] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698160, 'name': ReconfigVM_Task, 'duration_secs': 0.667858} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.436970] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Reconfigured VM instance instance-00000068 to attach disk [datastore1] ffaa1402-5b51-4393-82c7-d9db964edfd3/ffaa1402-5b51-4393-82c7-d9db964edfd3.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1686.437619] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a2112fd-c791-4167-b830-04b89292c288 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.444241] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1686.444241] env[63297]: value = "task-1698162" [ 1686.444241] env[63297]: _type = "Task" [ 1686.444241] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.452531] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698162, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.472594] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1686.472922] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35bc333d-cf28-415b-bdf2-b4cb40196fed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.479144] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5016ae9d-0bac-40f9-908e-ed805b95eae6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-66b7a1e5-5e74-49db-99f3-4427d7297bf2-bbed31c2-2385-4195-90af-10f04ce61f2f" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.030s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.481991] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1686.481991] env[63297]: value = "task-1698163" [ 1686.481991] env[63297]: _type = "Task" [ 1686.481991] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.495079] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698163, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.502259] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698161, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097137} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.503063] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1686.503616] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7a1732-2293-44ed-b3cd-b707ae100d41 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.526678] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 314c2cd3-6f1d-4d74-ad84-d7cc44375456/314c2cd3-6f1d-4d74-ad84-d7cc44375456.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1686.526999] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4fc745fb-4e4f-4030-a5c5-02392b8ce897 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.547848] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1686.547848] env[63297]: value = "task-1698164" [ 1686.547848] env[63297]: _type = "Task" [ 1686.547848] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.556868] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698164, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.584099] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f73bbf97-bb55-4271-8355-96a41c12323a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "b1ed5d76-d358-49d3-a854-8f968bc987ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.979s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.866487] env[63297]: DEBUG nova.compute.manager [req-93341eae-c349-41a5-b6b9-09006e392158 req-ead3c0e0-fe40-48b6-81b5-b3a1d31cb5ce service nova] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Received event network-vif-deleted-cc29c84c-0884-4feb-9a78-7098d11b28ab {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1686.951015] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd1237c-6a8b-46c1-a432-1217e939a4d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.964281] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b183766-fe84-402c-a0ac-eedf41fd2ebc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.967769] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698162, 'name': Rename_Task, 'duration_secs': 0.168594} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.968060] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1686.968645] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34506464-f7bc-496e-81c9-3b58467aba0e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.006622] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a9ccb8-06a9-42db-9b62-187b42335fb3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.009974] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1687.009974] env[63297]: value = "task-1698165" [ 1687.009974] env[63297]: _type = "Task" [ 1687.009974] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.019647] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698163, 'name': PowerOffVM_Task, 'duration_secs': 0.294276} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.020476] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1687.020704] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance 'd7dc0672-c908-418e-bfcb-8daa761fba37' progress to 17 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1687.025375] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd8e39f-f169-4bc1-b4b7-8320acec326b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.034209] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698165, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.044657] env[63297]: DEBUG nova.compute.provider_tree [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1687.059378] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698164, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.073498] env[63297]: DEBUG nova.compute.manager [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1687.104754] env[63297]: DEBUG nova.virt.hardware [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1687.105035] env[63297]: DEBUG nova.virt.hardware [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1687.105198] env[63297]: DEBUG nova.virt.hardware [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1687.105407] env[63297]: DEBUG nova.virt.hardware [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1687.105601] env[63297]: DEBUG nova.virt.hardware [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1687.105766] env[63297]: DEBUG nova.virt.hardware [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1687.105978] env[63297]: DEBUG nova.virt.hardware [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1687.106166] env[63297]: DEBUG nova.virt.hardware [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1687.106350] env[63297]: DEBUG nova.virt.hardware [None 
req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1687.106679] env[63297]: DEBUG nova.virt.hardware [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1687.106905] env[63297]: DEBUG nova.virt.hardware [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1687.107859] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0835a6-5bbd-4ead-86c5-4f0b75868d3c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.116333] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5512688f-270c-43f3-8dd2-499dfc73ce77 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.456109] env[63297]: DEBUG nova.network.neutron [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Successfully updated port: 54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1687.469010] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.469314] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.520127] env[63297]: DEBUG oslo_vmware.api [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698165, 'name': PowerOnVM_Task, 'duration_secs': 0.537134} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.520490] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1687.520715] env[63297]: INFO nova.compute.manager [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Took 11.10 seconds to spawn the instance on the hypervisor. [ 1687.520895] env[63297]: DEBUG nova.compute.manager [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1687.521689] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d751a6d7-448a-433e-bc28-209c00f20294 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.532521] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1687.532860] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1687.532952] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1687.533141] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1687.533289] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1687.533456] 
env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1687.533674] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1687.533878] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1687.534036] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1687.534169] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1687.534339] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1687.539968] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3dd1593-aba5-4718-9c57-9afc5df72890 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.552129] env[63297]: DEBUG nova.scheduler.client.report [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1687.566978] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698164, 'name': ReconfigVM_Task, 'duration_secs': 0.773026} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.567258] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1687.567258] env[63297]: value = "task-1698166" [ 1687.567258] env[63297]: _type = "Task" [ 1687.567258] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.568459] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 314c2cd3-6f1d-4d74-ad84-d7cc44375456/314c2cd3-6f1d-4d74-ad84-d7cc44375456.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1687.569136] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-adfa5ba1-3db6-4a98-9060-d3044ee703e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.579659] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698166, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.580906] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1687.580906] env[63297]: value = "task-1698167" [ 1687.580906] env[63297]: _type = "Task" [ 1687.580906] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.589895] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698167, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.958677] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.958987] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquired lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.958987] env[63297]: DEBUG nova.network.neutron [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1687.973836] env[63297]: DEBUG nova.compute.manager [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1688.047830] env[63297]: INFO nova.compute.manager [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Took 19.60 seconds to build instance. [ 1688.056716] env[63297]: DEBUG oslo_concurrency.lockutils [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.872s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.078576] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698166, 'name': ReconfigVM_Task, 'duration_secs': 0.2158} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.079132] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance 'd7dc0672-c908-418e-bfcb-8daa761fba37' progress to 33 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1688.091134] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698167, 'name': Rename_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.092612] env[63297]: INFO nova.scheduler.client.report [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Deleted allocations for instance 96265295-6b0c-4803-bb89-6166c9d3fc7f [ 1688.119451] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.119714] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.119920] env[63297]: DEBUG nova.objects.instance [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lazy-loading 'pci_requests' on Instance uuid 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1688.316770] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "interface-66b7a1e5-5e74-49db-99f3-4427d7297bf2-a5af9937-1640-481b-b998-9090c09fa6e0" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.317065] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-66b7a1e5-5e74-49db-99f3-4427d7297bf2-a5af9937-1640-481b-b998-9090c09fa6e0" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.492755] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.518941] env[63297]: DEBUG nova.network.neutron [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1688.551913] env[63297]: DEBUG oslo_concurrency.lockutils [None req-20b0334d-bc18-470d-859f-64ff17d5dfaf tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "ffaa1402-5b51-4393-82c7-d9db964edfd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.116s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.589936] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1688.590242] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1688.590359] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1688.590540] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1688.590686] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1688.590830] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1688.591045] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1688.591209] env[63297]: 
DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1688.591375] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1688.591545] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1688.591712] env[63297]: DEBUG nova.virt.hardware [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1688.597102] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Reconfiguring VM instance instance-00000067 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1688.599553] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eae7ff69-ca4f-485a-8798-2562b8322510 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.616867] env[63297]: DEBUG oslo_concurrency.lockutils [None req-44245acb-a6c3-46c1-a120-2554782e8785 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "96265295-6b0c-4803-bb89-6166c9d3fc7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.420s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.617878] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698167, 'name': Rename_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.623417] env[63297]: DEBUG nova.objects.instance [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lazy-loading 'numa_topology' on Instance uuid 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1688.625607] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1688.625607] env[63297]: value = "task-1698168" [ 1688.625607] env[63297]: _type = "Task" [ 1688.625607] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.635045] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698168, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.713346] env[63297]: DEBUG nova.network.neutron [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Updating instance_info_cache with network_info: [{"id": "54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d", "address": "fa:16:3e:7a:3c:bc", "network": {"id": "b4d7c5a5-3ce8-4260-adec-a4bfc26133b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1616270477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d636a91a492a4f538bc2fc8634f5fa14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b8e5d7-42", "ovs_interfaceid": "54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.819807] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.820014] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.821145] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2386e05c-e86c-49a3-b542-6b97684645af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.842548] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1149154b-2810-49d9-8fe1-85f8a34abb51 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.873738] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Reconfiguring VM to detach interface {{(pid=63297) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1688.874089] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a481f53d-0ec7-457a-a147-cc3b2524050a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.892541] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1688.892541] env[63297]: value = "task-1698169" [ 1688.892541] env[63297]: _type = "Task" [ 1688.892541] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.895090] env[63297]: DEBUG nova.compute.manager [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Received event network-vif-plugged-54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1688.895295] env[63297]: DEBUG oslo_concurrency.lockutils [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] Acquiring lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.895495] env[63297]: DEBUG oslo_concurrency.lockutils [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] Lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.895691] env[63297]: DEBUG oslo_concurrency.lockutils [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] Lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.895872] env[63297]: DEBUG nova.compute.manager [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] No waiting events found 
dispatching network-vif-plugged-54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1688.896083] env[63297]: WARNING nova.compute.manager [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Received unexpected event network-vif-plugged-54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d for instance with vm_state building and task_state spawning. [ 1688.896266] env[63297]: DEBUG nova.compute.manager [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Received event network-changed-54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1688.896420] env[63297]: DEBUG nova.compute.manager [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Refreshing instance network info cache due to event network-changed-54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1688.896587] env[63297]: DEBUG oslo_concurrency.lockutils [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] Acquiring lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.904915] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.094893] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698167, 'name': Rename_Task, 'duration_secs': 1.149406} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.095186] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1689.095438] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8d84b8e-cedd-4396-877f-299fdf320685 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.101825] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1689.101825] env[63297]: value = "task-1698170" [ 1689.101825] env[63297]: _type = "Task" [ 1689.101825] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.109467] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698170, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.126739] env[63297]: INFO nova.compute.claims [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1689.138094] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698168, 'name': ReconfigVM_Task, 'duration_secs': 0.171373} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.138310] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Reconfigured VM instance instance-00000067 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1689.139154] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2161d1-7aa0-4ca9-a514-82975c9bda19 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.162100] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] d7dc0672-c908-418e-bfcb-8daa761fba37/d7dc0672-c908-418e-bfcb-8daa761fba37.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1689.162430] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3728945-4c1a-4c93-989f-154112ae24dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.181313] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1689.181313] env[63297]: value = "task-1698171" [ 1689.181313] env[63297]: _type = "Task" [ 1689.181313] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.194923] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698171, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.216754] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Releasing lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.217021] env[63297]: DEBUG nova.compute.manager [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Instance network_info: |[{"id": "54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d", "address": "fa:16:3e:7a:3c:bc", "network": {"id": "b4d7c5a5-3ce8-4260-adec-a4bfc26133b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1616270477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d636a91a492a4f538bc2fc8634f5fa14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b8e5d7-42", "ovs_interfaceid": "54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1689.217324] env[63297]: DEBUG oslo_concurrency.lockutils [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] Acquired lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.217513] env[63297]: DEBUG nova.network.neutron [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Refreshing network info cache for port 54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1689.218776] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:3c:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1689.227131] env[63297]: DEBUG oslo.service.loopingcall [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1689.228865] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1689.228865] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14583c62-a08e-40aa-83ee-fdcc29ae9e2d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.250258] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1689.250258] env[63297]: value = "task-1698172" [ 1689.250258] env[63297]: _type = "Task" [ 1689.250258] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.259408] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698172, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.407286] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.525553] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.525927] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.611455] env[63297]: DEBUG oslo_vmware.api [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698170, 'name': PowerOnVM_Task, 'duration_secs': 0.459591} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.611875] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1689.612136] env[63297]: INFO nova.compute.manager [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Took 10.84 seconds to spawn the instance on the hypervisor. 
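The PowerOnVM_Task entries above show the generic oslo.vmware polling pattern these logs keep repeating: a vCenter task is submitted, its progress is logged periodically ("progress is 0%" ... "progress is 99%"), and the caller returns once the task reaches a terminal state (here task-1698170 completes in roughly 0.46s, per 'duration_secs': 0.459591). A minimal sketch of that poll-until-complete loop, assuming a hypothetical fetch_task_info callable instead of the real vSphere bindings (this is an illustration of the pattern, not oslo.vmware's actual implementation):

    import time


    class TaskFailed(Exception):
        """Raised when the backend reports an error state for the task."""


    def wait_for_task(fetch_task_info, task_id, interval=0.5):
        """Poll a task until it reaches a terminal state.

        fetch_task_info: callable returning a dict with a 'state' key in
        ('queued', 'running', 'success', 'error') plus optional 'progress',
        'result' and 'error' keys.
        """
        while True:
            info = fetch_task_info(task_id)
            state = info["state"]
            if state in ("queued", "running"):
                # Mirrors the periodic "Task: {...} progress is N%." entries above.
                print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
                time.sleep(interval)
                continue
            if state == "success":
                print(f"Task {task_id} completed successfully.")
                return info.get("result")
            raise TaskFailed(info.get("error", "unknown error"))


    # Example: a fake task that reports 'running' twice before succeeding.
    _states = iter([{"state": "running", "progress": 0},
                    {"state": "running", "progress": 50},
                    {"state": "success", "result": "powered-on"}])
    print(wait_for_task(lambda _id: next(_states), "task-1698170", interval=0))
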
[ 1689.612347] env[63297]: DEBUG nova.compute.manager [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1689.613282] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5dfa42-3818-4a31-8e1c-85cae05269f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.695821] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698171, 'name': ReconfigVM_Task, 'duration_secs': 0.299114} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.697740] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Reconfigured VM instance instance-00000067 to attach disk [datastore1] d7dc0672-c908-418e-bfcb-8daa761fba37/d7dc0672-c908-418e-bfcb-8daa761fba37.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1689.698038] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance 'd7dc0672-c908-418e-bfcb-8daa761fba37' progress to 50 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1689.701461] env[63297]: DEBUG nova.compute.manager [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Stashing vm_state: active {{(pid=63297) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1689.761738] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698172, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.904362] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.923245] env[63297]: DEBUG nova.network.neutron [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Updated VIF entry in instance network info cache for port 54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1689.923637] env[63297]: DEBUG nova.network.neutron [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Updating instance_info_cache with network_info: [{"id": "54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d", "address": "fa:16:3e:7a:3c:bc", "network": {"id": "b4d7c5a5-3ce8-4260-adec-a4bfc26133b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1616270477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d636a91a492a4f538bc2fc8634f5fa14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b8e5d7-42", "ovs_interfaceid": "54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.029087] env[63297]: INFO nova.compute.manager [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Detaching volume 958a8520-a443-40be-8c9d-7f0dbc3abcfd [ 1690.065731] env[63297]: INFO nova.virt.block_device [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Attempting to driver detach volume 958a8520-a443-40be-8c9d-7f0dbc3abcfd from mountpoint /dev/sdb [ 1690.066028] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Volume detach. 
Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1690.066239] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353884', 'volume_id': '958a8520-a443-40be-8c9d-7f0dbc3abcfd', 'name': 'volume-958a8520-a443-40be-8c9d-7f0dbc3abcfd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f', 'attached_at': '', 'detached_at': '', 'volume_id': '958a8520-a443-40be-8c9d-7f0dbc3abcfd', 'serial': '958a8520-a443-40be-8c9d-7f0dbc3abcfd'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1690.067559] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dd4f07-cb2b-455e-bf68-2284f5cfc9b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.090484] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e0527a-98ce-4f3c-a0f9-66261cdb2cf4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.097991] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37545dd5-b423-4208-b8d5-90feea9ab96c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.118692] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14482c08-27ad-4e92-89fb-6adf86da3dce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.140915] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] The volume has not been displaced from its original location: [datastore1] volume-958a8520-a443-40be-8c9d-7f0dbc3abcfd/volume-958a8520-a443-40be-8c9d-7f0dbc3abcfd.vmdk. No consolidation needed. {{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1690.146575] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Reconfiguring VM instance instance-00000024 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1690.148997] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9524b5d7-6aea-47a3-bdc3-b3861b8be34e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.162722] env[63297]: INFO nova.compute.manager [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Took 18.91 seconds to build instance. 
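The volume-detach entries above print the connection_info the vmdk driver works from: the shadow VM moref ('vm-353884'), the Cinder volume id, and the volume-<id>.vmdk path on datastore1 that the following ReconfigVM_Task detaches as disk 2001. A small self-contained sketch of how those fields map to the logged datastore path; the helper name and the hard-coded datastore are illustrative only, taken from the values shown in the log rather than from Nova's code:

    # connection_info as printed by the _detach_volume_vmdk entry above
    # (truncated to the fields used here).
    connection_info = {
        "driver_volume_type": "vmdk",
        "data": {
            "volume": "vm-353884",  # managed object ref of the shadow VM backing the volume
            "volume_id": "958a8520-a443-40be-8c9d-7f0dbc3abcfd",
            "name": "volume-958a8520-a443-40be-8c9d-7f0dbc3abcfd",
            "access_mode": "rw",
            "encrypted": False,
        },
    }


    def vmdk_backing_path(info, datastore="datastore1"):
        """Return (shadow VM ref, volume id, datastore path of the backing vmdk).

        The path layout matches the one logged above:
        [datastore1] volume-<id>/volume-<id>.vmdk
        """
        data = info["data"]
        path = "[{}] {}/{}.vmdk".format(datastore, data["name"], data["name"])
        return data["volume"], data["volume_id"], path


    if __name__ == "__main__":
        print(vmdk_backing_path(connection_info))
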
[ 1690.172136] env[63297]: DEBUG oslo_vmware.api [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1690.172136] env[63297]: value = "task-1698173" [ 1690.172136] env[63297]: _type = "Task" [ 1690.172136] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.180451] env[63297]: DEBUG oslo_vmware.api [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698173, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.212083] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d791f74-c770-4434-b1c7-5e6ecc1d55d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.235658] env[63297]: DEBUG oslo_concurrency.lockutils [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.236505] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e480867c-f861-4f08-b8e9-66b490fc893d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.256428] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance 'd7dc0672-c908-418e-bfcb-8daa761fba37' progress to 67 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1690.271430] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698172, 'name': CreateVM_Task, 'duration_secs': 0.589117} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.271601] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1690.272317] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.272531] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.272886] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1690.273181] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f4586d0-4fb6-4de9-a14a-cc7a1422db53 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.279115] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1690.279115] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52df1ab9-ba02-c5d6-e3b1-357c7451e990" [ 1690.279115] env[63297]: _type = "Task" [ 1690.279115] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.287408] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52df1ab9-ba02-c5d6-e3b1-357c7451e990, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.405577] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.426736] env[63297]: DEBUG oslo_concurrency.lockutils [req-73034c89-8d93-4d86-bdd1-700b563fbc3f req-5e864d4e-400e-4aab-9dc4-1e52d67615de service nova] Releasing lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.433798] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff27cf6-d4e5-4c45-ac05-87d7d36de55a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.441988] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a9f196-c8f4-4aa1-b87e-4da7fe594313 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.477641] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45dc6598-c47d-4fd9-a112-c5bc6287a512 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.485985] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22f0e3c-2f20-42ab-8a06-d9dd8bf2ffba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.499802] env[63297]: DEBUG nova.compute.provider_tree [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1690.665709] env[63297]: DEBUG oslo_concurrency.lockutils [None req-409c4cb0-8904-4415-b00b-beff7de121ce tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.424s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.682492] env[63297]: DEBUG oslo_vmware.api [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698173, 'name': ReconfigVM_Task, 'duration_secs': 0.273832} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.683297] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Reconfigured VM instance instance-00000024 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1690.688615] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d8c3ccd-8bff-4718-82a0-7bb7cb1e391d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.704603] env[63297]: DEBUG oslo_vmware.api [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1690.704603] env[63297]: value = "task-1698174" [ 1690.704603] env[63297]: _type = "Task" [ 1690.704603] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.713760] env[63297]: DEBUG oslo_vmware.api [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698174, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.789619] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52df1ab9-ba02-c5d6-e3b1-357c7451e990, 'name': SearchDatastore_Task, 'duration_secs': 0.01916} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.791961] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.791961] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1690.791961] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.791961] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.791961] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1690.791961] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86103059-10fb-4b92-b4d3-696c62a937d1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.800632] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1690.800972] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1690.801570] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec036087-fc52-41b9-8958-6569d19d3f6b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.804748] env[63297]: DEBUG nova.network.neutron [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Port 51feb81a-d695-4671-800d-b58470af4ae2 binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1690.809763] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1690.809763] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fa6947-9d12-dd4b-6983-25d618eabc31" [ 1690.809763] env[63297]: _type = "Task" [ 1690.809763] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.818142] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fa6947-9d12-dd4b-6983-25d618eabc31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.905642] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.005137] env[63297]: DEBUG nova.scheduler.client.report [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1691.172829] env[63297]: DEBUG oslo_concurrency.lockutils [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.173264] env[63297]: DEBUG oslo_concurrency.lockutils [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.217360] env[63297]: DEBUG oslo_vmware.api [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698174, 'name': ReconfigVM_Task, 'duration_secs': 0.183247} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.217642] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353884', 'volume_id': '958a8520-a443-40be-8c9d-7f0dbc3abcfd', 'name': 'volume-958a8520-a443-40be-8c9d-7f0dbc3abcfd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f', 'attached_at': '', 'detached_at': '', 'volume_id': '958a8520-a443-40be-8c9d-7f0dbc3abcfd', 'serial': '958a8520-a443-40be-8c9d-7f0dbc3abcfd'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1691.252741] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9786edd-6ade-45db-a7d1-c13d0cc6f753 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.253023] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9786edd-6ade-45db-a7d1-c13d0cc6f753 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.253208] env[63297]: DEBUG nova.compute.manager [None req-c9786edd-6ade-45db-a7d1-c13d0cc6f753 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1691.254134] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd13b7b0-4196-4200-8c5a-e08f26c6e536 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.260623] env[63297]: DEBUG nova.compute.manager [None req-c9786edd-6ade-45db-a7d1-c13d0cc6f753 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63297) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1691.261207] env[63297]: DEBUG nova.objects.instance [None req-c9786edd-6ade-45db-a7d1-c13d0cc6f753 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lazy-loading 'flavor' on Instance uuid 314c2cd3-6f1d-4d74-ad84-d7cc44375456 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1691.321054] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fa6947-9d12-dd4b-6983-25d618eabc31, 'name': SearchDatastore_Task, 'duration_secs': 0.010115} 
completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.321466] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27d05728-48ee-49c8-82c2-b7febdc47a88 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.326314] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1691.326314] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528cb68d-3d25-6ff2-084c-2bda435f1f44" [ 1691.326314] env[63297]: _type = "Task" [ 1691.326314] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.333504] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528cb68d-3d25-6ff2-084c-2bda435f1f44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.406599] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.510615] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.391s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.512810] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.020s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.514384] env[63297]: INFO nova.compute.claims [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1691.544723] env[63297]: INFO nova.network.neutron [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updating port bb862c99-f006-416a-9b98-0fb287a5d194 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1691.676760] env[63297]: INFO nova.compute.manager [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 
1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Detaching volume 540b2a55-1e8e-476b-87e5-7c4753f15ddb [ 1691.709338] env[63297]: INFO nova.virt.block_device [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Attempting to driver detach volume 540b2a55-1e8e-476b-87e5-7c4753f15ddb from mountpoint /dev/sdb [ 1691.709338] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Volume detach. Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1691.709338] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353987', 'volume_id': '540b2a55-1e8e-476b-87e5-7c4753f15ddb', 'name': 'volume-540b2a55-1e8e-476b-87e5-7c4753f15ddb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1d8c6df5-069f-4647-a2f6-e69a4bf8be94', 'attached_at': '', 'detached_at': '', 'volume_id': '540b2a55-1e8e-476b-87e5-7c4753f15ddb', 'serial': '540b2a55-1e8e-476b-87e5-7c4753f15ddb'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1691.712448] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d310df-5036-46f4-b07a-6cdc727068c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.738271] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b9865c-a20d-4c86-a180-6ef5296f5097 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.748731] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b973c7-750a-4d3e-8f7d-5db00b4eb58f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.774169] env[63297]: DEBUG nova.objects.instance [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lazy-loading 'flavor' on Instance uuid 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1691.776154] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55f0b61-16c0-4f65-9a03-899c9e9cdcb7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.779618] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9786edd-6ade-45db-a7d1-c13d0cc6f753 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1691.780113] env[63297]: DEBUG oslo_vmware.service 
[-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69198e6d-ca13-443b-9898-b960f3e1b8bb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.800166] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] The volume has not been displaced from its original location: [datastore1] volume-540b2a55-1e8e-476b-87e5-7c4753f15ddb/volume-540b2a55-1e8e-476b-87e5-7c4753f15ddb.vmdk. No consolidation needed. {{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1691.807541] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Reconfiguring VM instance instance-00000059 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1691.810172] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52884d08-9a1c-478e-9e8e-c5fcfdac0eea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.825831] env[63297]: DEBUG oslo_vmware.api [None req-c9786edd-6ade-45db-a7d1-c13d0cc6f753 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1691.825831] env[63297]: value = "task-1698175" [ 1691.825831] env[63297]: _type = "Task" [ 1691.825831] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.847849] env[63297]: DEBUG oslo_concurrency.lockutils [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "d7dc0672-c908-418e-bfcb-8daa761fba37-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.848207] env[63297]: DEBUG oslo_concurrency.lockutils [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "d7dc0672-c908-418e-bfcb-8daa761fba37-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.848607] env[63297]: DEBUG oslo_concurrency.lockutils [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "d7dc0672-c908-418e-bfcb-8daa761fba37-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.854652] env[63297]: DEBUG oslo_vmware.api [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1691.854652] env[63297]: value = 
"task-1698176" [ 1691.854652] env[63297]: _type = "Task" [ 1691.854652] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.867266] env[63297]: DEBUG oslo_vmware.api [None req-c9786edd-6ade-45db-a7d1-c13d0cc6f753 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698175, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.869685] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528cb68d-3d25-6ff2-084c-2bda435f1f44, 'name': SearchDatastore_Task, 'duration_secs': 0.041781} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.869685] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.869685] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886/5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1691.869685] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51f0baae-5a6f-47fc-97b9-3cdac03cb773 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.875092] env[63297]: DEBUG oslo_vmware.api [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698176, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.880522] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1691.880522] env[63297]: value = "task-1698177" [ 1691.880522] env[63297]: _type = "Task" [ 1691.880522] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.892193] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698177, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.910721] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.344885] env[63297]: DEBUG oslo_vmware.api [None req-c9786edd-6ade-45db-a7d1-c13d0cc6f753 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698175, 'name': PowerOffVM_Task, 'duration_secs': 0.168171} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.345237] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9786edd-6ade-45db-a7d1-c13d0cc6f753 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1692.345387] env[63297]: DEBUG nova.compute.manager [None req-c9786edd-6ade-45db-a7d1-c13d0cc6f753 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1692.346114] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef015cb-6b43-4e8c-8cca-c0884bdff2c7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.367053] env[63297]: DEBUG oslo_vmware.api [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698176, 'name': ReconfigVM_Task, 'duration_secs': 0.246118} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.367053] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Reconfigured VM instance instance-00000059 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1692.372632] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d61d240-3065-46d6-bb5a-3ef5b370a7a2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.394386] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698177, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.396553] env[63297]: DEBUG oslo_vmware.api [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1692.396553] env[63297]: value = "task-1698178" [ 1692.396553] env[63297]: _type = "Task" [ 1692.396553] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.410906] env[63297]: DEBUG oslo_vmware.api [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698178, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.414988] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.717609] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d073aa96-0ff1-4314-944d-5cc6a5420e6a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.724725] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052dc051-1615-4d35-865d-bd7580d788fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.754137] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af0bb67-079e-42f2-ad71-40f85e390b87 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.761629] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5a1965-d009-44a7-ab30-f509b22ac334 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.776207] env[63297]: DEBUG nova.compute.provider_tree [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1692.786204] env[63297]: DEBUG oslo_concurrency.lockutils [None req-adec641b-a74c-422d-8c33-55195bf9dfb4 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.260s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.860984] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c9786edd-6ade-45db-a7d1-c13d0cc6f753 tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456" "released" by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.608s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.894164] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698177, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612187} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.895022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1692.895195] env[63297]: DEBUG oslo_concurrency.lockutils [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1692.895367] env[63297]: DEBUG nova.network.neutron [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1692.896976] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886/5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1692.897203] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1692.897742] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6dcc75a7-23db-4508-912d-48dc19a6d07b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.913065] env[63297]: DEBUG oslo_vmware.api [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698178, 'name': ReconfigVM_Task, 'duration_secs': 0.154858} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.916790] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-353987', 'volume_id': '540b2a55-1e8e-476b-87e5-7c4753f15ddb', 'name': 'volume-540b2a55-1e8e-476b-87e5-7c4753f15ddb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1d8c6df5-069f-4647-a2f6-e69a4bf8be94', 'attached_at': '', 'detached_at': '', 'volume_id': '540b2a55-1e8e-476b-87e5-7c4753f15ddb', 'serial': '540b2a55-1e8e-476b-87e5-7c4753f15ddb'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1692.918934] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1692.918934] env[63297]: value = "task-1698179" [ 1692.918934] env[63297]: _type = "Task" [ 1692.918934] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.919158] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.927665] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698179, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.131166] env[63297]: DEBUG nova.compute.manager [req-d67dda5b-177e-47df-bea6-f6c2c16c4605 req-55276fd8-c798-4160-a17c-67ecbf3b98c1 service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Received event network-vif-plugged-bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1693.131166] env[63297]: DEBUG oslo_concurrency.lockutils [req-d67dda5b-177e-47df-bea6-f6c2c16c4605 req-55276fd8-c798-4160-a17c-67ecbf3b98c1 service nova] Acquiring lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.131166] env[63297]: DEBUG oslo_concurrency.lockutils [req-d67dda5b-177e-47df-bea6-f6c2c16c4605 req-55276fd8-c798-4160-a17c-67ecbf3b98c1 service nova] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.131166] env[63297]: DEBUG oslo_concurrency.lockutils [req-d67dda5b-177e-47df-bea6-f6c2c16c4605 req-55276fd8-c798-4160-a17c-67ecbf3b98c1 service nova] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.131166] env[63297]: DEBUG nova.compute.manager [req-d67dda5b-177e-47df-bea6-f6c2c16c4605 req-55276fd8-c798-4160-a17c-67ecbf3b98c1 service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] No waiting events found dispatching network-vif-plugged-bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1693.131166] env[63297]: WARNING nova.compute.manager [req-d67dda5b-177e-47df-bea6-f6c2c16c4605 req-55276fd8-c798-4160-a17c-67ecbf3b98c1 service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Received unexpected event network-vif-plugged-bb862c99-f006-416a-9b98-0fb287a5d194 for instance with vm_state shelved_offloaded and task_state spawning. 
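
The "Acquiring lock ... by ...", "Lock ... acquired ... :: waited" and "Lock ... released ... :: held" DEBUG trios that recur throughout this log (lockutils.py:402/407/421) are emitted by oslo.concurrency's named-lock helpers rather than by Nova's own code. A minimal sketch of that pattern follows; the lock and function names are made up for illustration and are not Nova's actual identifiers.

    # Illustrative sketch only -- not Nova source code. It shows the
    # oslo_concurrency.lockutils usage that produces the
    # "Acquiring lock ..." / "acquired ... :: waited" / "released ... :: held"
    # DEBUG messages seen above.
    from oslo_concurrency import lockutils


    @lockutils.synchronized('example-instance-uuid-events')
    def _clear_events():
        # Runs only while the named in-process lock is held; the decorator's
        # wrapper logs how long the caller waited and how long it held the lock.
        return {}


    def do_detach_volume(instance_uuid):
        # Context-manager form of the same primitive, for ad-hoc critical sections.
        with lockutils.lock(instance_uuid):
            pass  # the detach work would run here


    if __name__ == '__main__':
        _clear_events()
        do_detach_volume('example-instance-uuid')

Callers using the same lock name are serialized within the process; Nova builds these names from instance UUIDs plus suffixes such as "-events", which is why the waited/held durations in the log line up with the detach, stop, and terminate operations on specific instances.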
[ 1693.255164] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.255164] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.255164] env[63297]: DEBUG nova.network.neutron [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1693.279747] env[63297]: DEBUG nova.scheduler.client.report [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1693.414516] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.428742] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698179, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068804} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.429016] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1693.430091] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8016be9a-5491-4720-acd2-c1759065b1c3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.451977] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886/5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1693.454416] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03e8301b-15a5-4502-8da4-e226c3f6ec01 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.470589] env[63297]: DEBUG nova.objects.instance [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lazy-loading 'flavor' on Instance uuid 1d8c6df5-069f-4647-a2f6-e69a4bf8be94 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1693.479889] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1693.479889] env[63297]: value = "task-1698180" [ 1693.479889] env[63297]: _type = "Task" [ 1693.479889] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.490034] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698180, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.690725] env[63297]: DEBUG nova.network.neutron [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance_info_cache with network_info: [{"id": "51feb81a-d695-4671-800d-b58470af4ae2", "address": "fa:16:3e:55:69:f5", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51feb81a-d6", "ovs_interfaceid": "51feb81a-d695-4671-800d-b58470af4ae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.704979] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1693.705230] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1693.705377] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1693.786014] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.272s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.786014] env[63297]: DEBUG nova.compute.manager [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1693.792905] env[63297]: DEBUG oslo_concurrency.lockutils [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.557s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.840167] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.840425] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.840630] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.841277] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.841458] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.843506] env[63297]: INFO nova.compute.manager [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Terminating instance [ 1693.845280] env[63297]: DEBUG nova.compute.manager [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1693.845467] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1693.846553] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1a1b87-389f-4238-99a4-aaf329c7c934 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.854591] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1693.854829] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c99d78e7-7b4a-48c9-b168-4a2269b363b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.861647] env[63297]: DEBUG oslo_vmware.api [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1693.861647] env[63297]: value = "task-1698181" [ 1693.861647] env[63297]: _type = "Task" [ 1693.861647] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.870513] env[63297]: DEBUG oslo_vmware.api [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698181, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.915054] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.990453] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698180, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.037744] env[63297]: DEBUG nova.network.neutron [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updating instance_info_cache with network_info: [{"id": "bb862c99-f006-416a-9b98-0fb287a5d194", "address": "fa:16:3e:03:4f:8d", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb862c99-f0", "ovs_interfaceid": "bb862c99-f006-416a-9b98-0fb287a5d194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1694.160338] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.160761] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.161015] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.161232] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1694.161404] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.163503] env[63297]: INFO nova.compute.manager [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Terminating instance [ 1694.165281] env[63297]: DEBUG nova.compute.manager [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1694.165472] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1694.166311] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025699d0-5d10-4c1c-bc04-cf16fd584cf6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.173915] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1694.174154] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2bb9425-1a38-4434-9f40-3de4c87ab93c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.193300] env[63297]: DEBUG oslo_concurrency.lockutils [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.298070] env[63297]: INFO nova.compute.claims [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1694.302458] env[63297]: DEBUG nova.compute.utils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1694.303874] env[63297]: DEBUG nova.compute.manager [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] 
Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1694.304065] env[63297]: DEBUG nova.network.neutron [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1694.343808] env[63297]: DEBUG nova.policy [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be5e07baf148496880261386dff8df76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e3dcd98ebe94a75a94322b03feba3b4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1694.371769] env[63297]: DEBUG oslo_vmware.api [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698181, 'name': PowerOffVM_Task, 'duration_secs': 0.195457} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.373713] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1694.373896] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1694.374173] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-529a5942-fa93-4937-bcb9-1d73fd1f3c25 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.415513] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.478786] env[63297]: DEBUG oslo_concurrency.lockutils [None req-056c6192-4bc0-4a1b-b149-e4f88f0fa3a9 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.306s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.489857] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698180, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.542157] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.575259] env[63297]: DEBUG nova.virt.hardware [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='80d4dde70c415f47340fede3454d8dd2',container_format='bare',created_at=2024-12-10T17:30:08Z,direct_url=,disk_format='vmdk',id=6d9483fb-3122-4c2b-800b-dca528822bb1,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-480908442-shelved',owner='01fe9157b11244cb86a7626caae0616d',properties=ImageMetaProps,protected=,size=31663104,status='active',tags=,updated_at=2024-12-10T17:30:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1694.575510] env[63297]: DEBUG nova.virt.hardware [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1694.575666] env[63297]: DEBUG nova.virt.hardware [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1694.575846] env[63297]: DEBUG nova.virt.hardware [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1694.575989] env[63297]: DEBUG nova.virt.hardware [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 
tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1694.576150] env[63297]: DEBUG nova.virt.hardware [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1694.576357] env[63297]: DEBUG nova.virt.hardware [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1694.576509] env[63297]: DEBUG nova.virt.hardware [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1694.576670] env[63297]: DEBUG nova.virt.hardware [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1694.576831] env[63297]: DEBUG nova.virt.hardware [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1694.576996] env[63297]: DEBUG nova.virt.hardware [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1694.577845] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9bc667-f3e5-4a24-93ae-3f773b513c62 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.585672] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67141d4e-072f-473e-9053-9db00bf400d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.598987] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:4f:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb862c99-f006-416a-9b98-0fb287a5d194', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1694.606431] env[63297]: DEBUG oslo.service.loopingcall [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1694.606655] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1694.606855] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95fdd4f2-2e1a-445d-9f07-5cb544c9ebda {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.625600] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1694.625600] env[63297]: value = "task-1698184" [ 1694.625600] env[63297]: _type = "Task" [ 1694.625600] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.633209] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698184, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.714455] env[63297]: DEBUG nova.network.neutron [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Successfully created port: c31a25f5-7d02-427f-932a-464daf59e755 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1694.722425] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c888603-6327-4ce2-991c-af1ebe8221b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.745848] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b6360c-2c4d-4e58-8797-db382e5917f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.753416] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance 'd7dc0672-c908-418e-bfcb-8daa761fba37' progress to 83 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1694.768154] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1694.768373] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
1694.768556] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleting the datastore file [datastore1] 314c2cd3-6f1d-4d74-ad84-d7cc44375456 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1694.768811] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7368182-f01e-4f2c-8b47-51924795a0f8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.777361] env[63297]: DEBUG oslo_vmware.api [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1694.777361] env[63297]: value = "task-1698185" [ 1694.777361] env[63297]: _type = "Task" [ 1694.777361] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.783830] env[63297]: DEBUG oslo_vmware.api [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698185, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.792421] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1694.792623] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1694.792830] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Deleting the datastore file [datastore1] 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1694.793163] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-410d5ee1-52d2-4672-994c-57e6fd9cb1dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.800567] env[63297]: DEBUG oslo_vmware.api [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1694.800567] env[63297]: value = "task-1698186" [ 1694.800567] env[63297]: _type = "Task" [ 1694.800567] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.806648] env[63297]: INFO nova.compute.resource_tracker [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating resource usage from migration e3bc88a1-09ba-4143-9085-38694740e952 [ 1694.812381] env[63297]: DEBUG nova.compute.manager [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1694.818558] env[63297]: DEBUG oslo_vmware.api [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.922255] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.997055] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698180, 'name': ReconfigVM_Task, 'duration_secs': 1.034666} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.997479] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886/5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1694.998870] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d94e4aa6-af16-4ab1-999a-d520d13ed33a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.007184] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1695.007184] env[63297]: value = "task-1698187" [ 1695.007184] env[63297]: _type = "Task" [ 1695.007184] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.019599] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698187, 'name': Rename_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.116725] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8a7ff0-1ee9-47eb-b717-8d0370d96f9e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.130047] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2112c8fb-502b-40b7-a2fa-d8b1e2e014c3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.143077] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698184, 'name': CreateVM_Task, 'duration_secs': 0.385315} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.167167] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1695.168621] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.168786] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.169195] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1695.169947] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb879bf-cae5-47b4-8261-abe0b4dc801e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.172561] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e46364d9-b13d-4278-ab1f-17f7732f5588 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.178024] env[63297]: DEBUG nova.compute.manager [req-4fb1bb8f-11ab-4009-b373-f4773b6d2697 req-3eff65d3-646e-40df-87f5-9603989de527 service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Received event network-changed-bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1695.178219] env[63297]: DEBUG nova.compute.manager [req-4fb1bb8f-11ab-4009-b373-f4773b6d2697 req-3eff65d3-646e-40df-87f5-9603989de527 service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Refreshing instance network info cache due to event 
network-changed-bb862c99-f006-416a-9b98-0fb287a5d194. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1695.178425] env[63297]: DEBUG oslo_concurrency.lockutils [req-4fb1bb8f-11ab-4009-b373-f4773b6d2697 req-3eff65d3-646e-40df-87f5-9603989de527 service nova] Acquiring lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.178563] env[63297]: DEBUG oslo_concurrency.lockutils [req-4fb1bb8f-11ab-4009-b373-f4773b6d2697 req-3eff65d3-646e-40df-87f5-9603989de527 service nova] Acquired lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.178720] env[63297]: DEBUG nova.network.neutron [req-4fb1bb8f-11ab-4009-b373-f4773b6d2697 req-3eff65d3-646e-40df-87f5-9603989de527 service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Refreshing network info cache for port bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1695.186797] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1695.186797] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]523c7517-d89d-97f4-4f60-a4aa0c24ca09" [ 1695.186797] env[63297]: _type = "Task" [ 1695.186797] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.188330] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b31bf79-2d27-4a76-802d-297b5387712e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.210246] env[63297]: DEBUG nova.compute.provider_tree [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1695.212549] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.212549] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Processing image 6d9483fb-3122-4c2b-800b-dca528822bb1 {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1695.212549] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1/6d9483fb-3122-4c2b-800b-dca528822bb1.vmdk" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.212549] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1/6d9483fb-3122-4c2b-800b-dca528822bb1.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.212549] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1695.213666] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d52680f-1bf6-4a36-a193-3219d8935534 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.230557] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1695.230756] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1695.231731] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bd13e31-2b56-440b-b505-5b8944ff6cae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.237354] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1695.237354] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522e4bfc-be9d-687e-d037-05c52d69239d" [ 1695.237354] env[63297]: _type = "Task" [ 1695.237354] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.247365] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522e4bfc-be9d-687e-d037-05c52d69239d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.259377] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1695.259735] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13f9b6b4-eca1-4361-b616-46e36b679a2f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.267541] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1695.267541] env[63297]: value = "task-1698188" [ 1695.267541] env[63297]: _type = "Task" [ 1695.267541] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.276507] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698188, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.284669] env[63297]: DEBUG oslo_vmware.api [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155283} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.284915] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1695.285105] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1695.285285] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1695.285452] env[63297]: INFO nova.compute.manager [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1695.285694] env[63297]: DEBUG oslo.service.loopingcall [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1695.285887] env[63297]: DEBUG nova.compute.manager [-] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1695.285980] env[63297]: DEBUG nova.network.neutron [-] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1695.310798] env[63297]: DEBUG oslo_vmware.api [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698186, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150004} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.311115] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1695.311305] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1695.311485] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1695.311656] env[63297]: INFO nova.compute.manager [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Took 1.47 seconds to destroy the instance on the hypervisor. [ 1695.311898] env[63297]: DEBUG oslo.service.loopingcall [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1695.312093] env[63297]: DEBUG nova.compute.manager [-] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1695.312186] env[63297]: DEBUG nova.network.neutron [-] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1695.416584] env[63297]: DEBUG oslo_vmware.api [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698169, 'name': ReconfigVM_Task, 'duration_secs': 6.049187} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.416873] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.417131] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Reconfigured VM to detach interface {{(pid=63297) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1695.517523] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698187, 'name': Rename_Task, 'duration_secs': 0.334576} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.517836] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1695.518118] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3c8f5b8-98a4-486a-bd9e-ce2f2e9e899b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.525545] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1695.525545] env[63297]: value = "task-1698189" [ 1695.525545] env[63297]: _type = "Task" [ 1695.525545] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.534018] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698189, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.622978] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.622978] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.622978] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.623431] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.623431] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.627951] env[63297]: INFO nova.compute.manager [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Terminating instance [ 1695.632725] env[63297]: DEBUG nova.compute.manager [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1695.632962] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1695.633895] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d48dd0-e12c-44a8-a7ed-bf001dbf8827 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.642894] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1695.645113] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9930977a-6fe2-4d88-9444-b3a6c0dc8c34 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.653050] env[63297]: DEBUG oslo_vmware.api [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1695.653050] env[63297]: value = "task-1698190" [ 1695.653050] env[63297]: _type = "Task" [ 1695.653050] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.662388] env[63297]: DEBUG oslo_vmware.api [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698190, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.718833] env[63297]: DEBUG nova.scheduler.client.report [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1695.747517] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Preparing fetch location {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1695.747769] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Fetch image to [datastore1] OSTACK_IMG_55374440-0410-48b9-a26e-56806882d380/OSTACK_IMG_55374440-0410-48b9-a26e-56806882d380.vmdk {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1695.747952] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Downloading stream optimized image 6d9483fb-3122-4c2b-800b-dca528822bb1 to [datastore1] OSTACK_IMG_55374440-0410-48b9-a26e-56806882d380/OSTACK_IMG_55374440-0410-48b9-a26e-56806882d380.vmdk on the data store datastore1 as vApp {{(pid=63297) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1695.748149] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Downloading image file data 6d9483fb-3122-4c2b-800b-dca528822bb1 to the ESX as VM named 'OSTACK_IMG_55374440-0410-48b9-a26e-56806882d380' {{(pid=63297) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1695.781889] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698188, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.821288] env[63297]: DEBUG nova.compute.manager [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1695.847099] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1695.847099] env[63297]: value = "resgroup-9" [ 1695.847099] env[63297]: _type = "ResourcePool" [ 1695.847099] env[63297]: }. {{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1695.847398] env[63297]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-72206469-cc19-4c57-95a1-8cbb82fdbbe7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.874389] env[63297]: DEBUG nova.virt.hardware [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1695.874389] env[63297]: DEBUG nova.virt.hardware [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1695.874389] env[63297]: DEBUG nova.virt.hardware [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1695.874649] env[63297]: DEBUG nova.virt.hardware [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1695.874649] env[63297]: DEBUG nova.virt.hardware [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1695.874704] env[63297]: DEBUG nova.virt.hardware [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1695.874881] env[63297]: DEBUG nova.virt.hardware [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1695.875063] env[63297]: DEBUG nova.virt.hardware [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1695.875835] env[63297]: DEBUG nova.virt.hardware [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1695.875835] env[63297]: DEBUG nova.virt.hardware [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1695.875835] env[63297]: DEBUG nova.virt.hardware [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1695.879980] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3264da-66f3-4116-9bf3-0eaad22e070d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.893596] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ce69ea-5ba8-4aab-a621-ba482b01b067 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.897936] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lease: (returnval){ [ 1695.897936] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5249eab7-dfa6-21b2-af08-6d461f168f65" [ 1695.897936] env[63297]: _type = "HttpNfcLease" [ 1695.897936] env[63297]: } obtained for vApp import into resource pool (val){ [ 1695.897936] env[63297]: value = "resgroup-9" [ 1695.897936] env[63297]: _type = "ResourcePool" [ 1695.897936] env[63297]: }. {{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1695.898893] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the lease: (returnval){ [ 1695.898893] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5249eab7-dfa6-21b2-af08-6d461f168f65" [ 1695.898893] env[63297]: _type = "HttpNfcLease" [ 1695.898893] env[63297]: } to be ready. 
{{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1695.920548] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1695.920548] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5249eab7-dfa6-21b2-af08-6d461f168f65" [ 1695.920548] env[63297]: _type = "HttpNfcLease" [ 1695.920548] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1695.937227] env[63297]: DEBUG nova.compute.manager [req-9f9ab86f-12af-4e8a-b5f6-a37d9f962395 req-d6d6a02f-6eac-4685-a72f-bbc82b04fb7a service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received event network-vif-deleted-a5af9937-1640-481b-b998-9090c09fa6e0 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1695.937428] env[63297]: INFO nova.compute.manager [req-9f9ab86f-12af-4e8a-b5f6-a37d9f962395 req-d6d6a02f-6eac-4685-a72f-bbc82b04fb7a service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Neutron deleted interface a5af9937-1640-481b-b998-9090c09fa6e0; detaching it from the instance and deleting it from the info cache [ 1695.937829] env[63297]: DEBUG nova.network.neutron [req-9f9ab86f-12af-4e8a-b5f6-a37d9f962395 req-d6d6a02f-6eac-4685-a72f-bbc82b04fb7a service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updating instance_info_cache with network_info: [{"id": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "address": "fa:16:3e:08:14:f1", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95dd8f-a3", "ovs_interfaceid": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bbed31c2-2385-4195-90af-10f04ce61f2f", "address": "fa:16:3e:8e:65:9b", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": 
"nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbed31c2-23", "ovs_interfaceid": "bbed31c2-2385-4195-90af-10f04ce61f2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.967788] env[63297]: DEBUG nova.network.neutron [req-4fb1bb8f-11ab-4009-b373-f4773b6d2697 req-3eff65d3-646e-40df-87f5-9603989de527 service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updated VIF entry in instance network info cache for port bb862c99-f006-416a-9b98-0fb287a5d194. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1695.968221] env[63297]: DEBUG nova.network.neutron [req-4fb1bb8f-11ab-4009-b373-f4773b6d2697 req-3eff65d3-646e-40df-87f5-9603989de527 service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updating instance_info_cache with network_info: [{"id": "bb862c99-f006-416a-9b98-0fb287a5d194", "address": "fa:16:3e:03:4f:8d", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb862c99-f0", "ovs_interfaceid": "bb862c99-f006-416a-9b98-0fb287a5d194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.036877] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698189, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.147608] env[63297]: DEBUG nova.network.neutron [-] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.163372] env[63297]: DEBUG oslo_vmware.api [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698190, 'name': PowerOffVM_Task, 'duration_secs': 0.190362} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.164534] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1696.164726] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1696.165023] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ebf352f-c4cf-4050-8154-e5857b9bb086 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.229092] env[63297]: DEBUG oslo_concurrency.lockutils [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.436s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.229299] env[63297]: INFO nova.compute.manager [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Migrating [ 1696.263151] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1696.263362] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1696.263570] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Deleting the datastore file [datastore1] 1d8c6df5-069f-4647-a2f6-e69a4bf8be94 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1696.264627] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-daf4b4e1-cb39-4c6e-9903-d89130b71a57 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.273012] env[63297]: DEBUG oslo_vmware.api [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1696.273012] env[63297]: value = "task-1698193" [ 1696.273012] env[63297]: _type = "Task" [ 1696.273012] 
env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.280183] env[63297]: DEBUG oslo_vmware.api [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698188, 'name': PowerOnVM_Task, 'duration_secs': 0.634007} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.280794] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1696.280989] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-be7eea13-4a37-4525-8d62-3360f41d1d0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance 'd7dc0672-c908-418e-bfcb-8daa761fba37' progress to 100 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1696.287238] env[63297]: DEBUG oslo_vmware.api [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698193, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.411050] env[63297]: DEBUG nova.network.neutron [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Successfully updated port: c31a25f5-7d02-427f-932a-464daf59e755 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1696.415352] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1696.415352] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5249eab7-dfa6-21b2-af08-6d461f168f65" [ 1696.415352] env[63297]: _type = "HttpNfcLease" [ 1696.415352] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1696.415352] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1696.415352] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5249eab7-dfa6-21b2-af08-6d461f168f65" [ 1696.415352] env[63297]: _type = "HttpNfcLease" [ 1696.415352] env[63297]: }. 
{{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1696.415352] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebce8c4c-733d-49de-a86f-a9fa4083842d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.425127] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bb7afb-5501-cc8e-04f8-a4eb61789c9e/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1696.425437] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating HTTP connection to write to file with size = 31663104 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bb7afb-5501-cc8e-04f8-a4eb61789c9e/disk-0.vmdk. {{(pid=63297) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1696.490596] env[63297]: DEBUG oslo_concurrency.lockutils [req-9f9ab86f-12af-4e8a-b5f6-a37d9f962395 req-d6d6a02f-6eac-4685-a72f-bbc82b04fb7a service nova] Acquiring lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.490596] env[63297]: DEBUG oslo_concurrency.lockutils [req-9f9ab86f-12af-4e8a-b5f6-a37d9f962395 req-d6d6a02f-6eac-4685-a72f-bbc82b04fb7a service nova] Acquired lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.490850] env[63297]: DEBUG oslo_concurrency.lockutils [req-4fb1bb8f-11ab-4009-b373-f4773b6d2697 req-3eff65d3-646e-40df-87f5-9603989de527 service nova] Releasing lock "refresh_cache-427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1696.494420] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf5c4ee-5550-439e-86ba-f30363c68875 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.495755] env[63297]: DEBUG nova.network.neutron [-] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.501155] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cdee9841-16c9-46c0-a734-3acb36c5cfb1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.519922] env[63297]: DEBUG oslo_concurrency.lockutils [req-9f9ab86f-12af-4e8a-b5f6-a37d9f962395 req-d6d6a02f-6eac-4685-a72f-bbc82b04fb7a service nova] Releasing lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1696.520256] env[63297]: WARNING nova.compute.manager [req-9f9ab86f-12af-4e8a-b5f6-a37d9f962395 
req-d6d6a02f-6eac-4685-a72f-bbc82b04fb7a service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Detach interface failed, port_id=a5af9937-1640-481b-b998-9090c09fa6e0, reason: No device with interface-id a5af9937-1640-481b-b998-9090c09fa6e0 exists on VM: nova.exception.NotFound: No device with interface-id a5af9937-1640-481b-b998-9090c09fa6e0 exists on VM [ 1696.538120] env[63297]: DEBUG oslo_vmware.api [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698189, 'name': PowerOnVM_Task, 'duration_secs': 0.52493} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.538416] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1696.538644] env[63297]: INFO nova.compute.manager [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Took 9.46 seconds to spawn the instance on the hypervisor. [ 1696.538878] env[63297]: DEBUG nova.compute.manager [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1696.539752] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05c1e09-9d75-4c23-bd38-318bdbaf43f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.650600] env[63297]: INFO nova.compute.manager [-] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Took 1.36 seconds to deallocate network for instance. 
[ 1696.746218] env[63297]: DEBUG oslo_concurrency.lockutils [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.746439] env[63297]: DEBUG oslo_concurrency.lockutils [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.746648] env[63297]: DEBUG nova.network.neutron [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1696.784531] env[63297]: DEBUG oslo_vmware.api [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698193, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299283} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.786573] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1696.786803] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1696.787754] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1696.787754] env[63297]: INFO nova.compute.manager [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1696.787754] env[63297]: DEBUG oslo.service.loopingcall [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1696.787754] env[63297]: DEBUG nova.compute.manager [-] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1696.787754] env[63297]: DEBUG nova.network.neutron [-] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1696.917376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.917376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.917376] env[63297]: DEBUG nova.network.neutron [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1696.922523] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.922721] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.922941] env[63297]: DEBUG nova.network.neutron [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1697.001812] env[63297]: INFO nova.compute.manager [-] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Took 1.69 seconds to deallocate network for instance. [ 1697.059258] env[63297]: INFO nova.compute.manager [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Took 14.42 seconds to build instance. 
[ 1697.159321] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1697.159321] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1697.159321] env[63297]: DEBUG nova.objects.instance [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lazy-loading 'resources' on Instance uuid 314c2cd3-6f1d-4d74-ad84-d7cc44375456 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1697.213525] env[63297]: DEBUG nova.compute.manager [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Received event network-vif-deleted-465bbc4d-f40c-47fa-bc69-986670184c65 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1697.213525] env[63297]: DEBUG nova.compute.manager [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Received event network-vif-plugged-c31a25f5-7d02-427f-932a-464daf59e755 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1697.213717] env[63297]: DEBUG oslo_concurrency.lockutils [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] Acquiring lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1697.213869] env[63297]: DEBUG oslo_concurrency.lockutils [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1697.214045] env[63297]: DEBUG oslo_concurrency.lockutils [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.214211] env[63297]: DEBUG nova.compute.manager [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] No waiting events found dispatching network-vif-plugged-c31a25f5-7d02-427f-932a-464daf59e755 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1697.214375] env[63297]: WARNING nova.compute.manager 
[req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Received unexpected event network-vif-plugged-c31a25f5-7d02-427f-932a-464daf59e755 for instance with vm_state building and task_state spawning. [ 1697.214534] env[63297]: DEBUG nova.compute.manager [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Received event network-changed-c31a25f5-7d02-427f-932a-464daf59e755 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1697.214744] env[63297]: DEBUG nova.compute.manager [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Refreshing instance network info cache due to event network-changed-c31a25f5-7d02-427f-932a-464daf59e755. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1697.214904] env[63297]: DEBUG oslo_concurrency.lockutils [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] Acquiring lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1697.479978] env[63297]: DEBUG nova.network.neutron [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1697.511341] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1697.528885] env[63297]: DEBUG nova.network.neutron [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance_info_cache with network_info: [{"id": "dc0af285-6a18-4cb7-b669-1b9a78865789", "address": "fa:16:3e:f3:6b:12", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc0af285-6a", "ovs_interfaceid": "dc0af285-6a18-4cb7-b669-1b9a78865789", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.538528] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1697.538781] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1697.538980] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1697.539181] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1697.539364] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.541746] env[63297]: INFO nova.compute.manager [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Terminating instance [ 1697.545027] env[63297]: DEBUG nova.compute.manager [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1697.545230] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1697.547684] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959a65d1-a4fa-4dbe-8e33-46310c92bf3c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.562910] env[63297]: DEBUG oslo_concurrency.lockutils [None req-487a24b3-8abf-43fc-bfc7-0610b2f05e52 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.929s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.564534] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1697.565174] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0075afdb-f962-4f98-8dab-822510d20117 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.572158] env[63297]: DEBUG oslo_vmware.api [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1697.572158] env[63297]: value = "task-1698194" [ 1697.572158] env[63297]: _type = "Task" [ 1697.572158] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.582757] env[63297]: DEBUG oslo_vmware.api [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698194, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.771687] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Completed reading data from the image iterator. {{(pid=63297) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1697.771687] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bb7afb-5501-cc8e-04f8-a4eb61789c9e/disk-0.vmdk. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1697.771687] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d62723-7dd5-4dff-a2e0-bec9596bf475 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.779037] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bb7afb-5501-cc8e-04f8-a4eb61789c9e/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1697.779358] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bb7afb-5501-cc8e-04f8-a4eb61789c9e/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1697.782059] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-c47c4134-6526-4df9-ad37-ffd9dd331d80 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.841395] env[63297]: DEBUG nova.network.neutron [-] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.845269] env[63297]: DEBUG nova.network.neutron [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance_info_cache with network_info: [{"id": "c31a25f5-7d02-427f-932a-464daf59e755", "address": "fa:16:3e:a0:49:93", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc31a25f5-7d", "ovs_interfaceid": "c31a25f5-7d02-427f-932a-464daf59e755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.941030] env[63297]: INFO nova.network.neutron [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 
tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Port bbed31c2-2385-4195-90af-10f04ce61f2f from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1697.941428] env[63297]: DEBUG nova.network.neutron [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updating instance_info_cache with network_info: [{"id": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "address": "fa:16:3e:08:14:f1", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95dd8f-a3", "ovs_interfaceid": "6e95dd8f-a3a6-4449-a572-aba4792afffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.993353] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a50df42-5614-4202-b0e8-c1a99c909166 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.003806] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26688f91-fb33-46a6-9ce4-80a99f3d83a0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.037976] env[63297]: DEBUG oslo_concurrency.lockutils [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.041262] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c524c2c-c6d7-4b68-a5be-1e24332829c1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.044281] env[63297]: DEBUG oslo_vmware.rw_handles [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bb7afb-5501-cc8e-04f8-a4eb61789c9e/disk-0.vmdk. 
{{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1698.044507] env[63297]: INFO nova.virt.vmwareapi.images [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Downloaded image file data 6d9483fb-3122-4c2b-800b-dca528822bb1 [ 1698.045739] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbfb446-57a0-4b9c-b9ae-588917e446a8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.065047] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44830568-1abe-4407-ac93-db8ede831f2d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.070526] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98f86ce3-852b-454b-91ce-42289055f1c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.073550] env[63297]: DEBUG nova.compute.manager [req-4edbe0f9-b1cb-4a67-946c-80fcfec3916c req-7ad3b15e-8c79-4efa-9ea5-66ab24c03250 service nova] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Received event network-vif-deleted-468613e2-02e8-4bf5-9887-fc0f90ff2f75 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1698.073822] env[63297]: DEBUG nova.compute.manager [req-4edbe0f9-b1cb-4a67-946c-80fcfec3916c req-7ad3b15e-8c79-4efa-9ea5-66ab24c03250 service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received event network-vif-deleted-bbed31c2-2385-4195-90af-10f04ce61f2f {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1698.073906] env[63297]: DEBUG nova.compute.manager [req-4edbe0f9-b1cb-4a67-946c-80fcfec3916c req-7ad3b15e-8c79-4efa-9ea5-66ab24c03250 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Received event network-changed-54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1698.074060] env[63297]: DEBUG nova.compute.manager [req-4edbe0f9-b1cb-4a67-946c-80fcfec3916c req-7ad3b15e-8c79-4efa-9ea5-66ab24c03250 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Refreshing instance network info cache due to event network-changed-54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1698.074273] env[63297]: DEBUG oslo_concurrency.lockutils [req-4edbe0f9-b1cb-4a67-946c-80fcfec3916c req-7ad3b15e-8c79-4efa-9ea5-66ab24c03250 service nova] Acquiring lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1698.074407] env[63297]: DEBUG oslo_concurrency.lockutils [req-4edbe0f9-b1cb-4a67-946c-80fcfec3916c req-7ad3b15e-8c79-4efa-9ea5-66ab24c03250 service nova] Acquired lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1698.074560] env[63297]: DEBUG nova.network.neutron [req-4edbe0f9-b1cb-4a67-946c-80fcfec3916c req-7ad3b15e-8c79-4efa-9ea5-66ab24c03250 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Refreshing network info cache for port 54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1698.088696] env[63297]: DEBUG nova.compute.provider_tree [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1698.097173] env[63297]: DEBUG oslo_vmware.api [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698194, 'name': PowerOffVM_Task, 'duration_secs': 0.311602} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.097906] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1698.098193] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1698.098543] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2a5ca81-aff9-4c70-88f7-f3c11d0e32db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.102636] env[63297]: INFO nova.virt.vmwareapi.images [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] The imported VM was unregistered [ 1698.105126] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Caching image {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1698.105359] env[63297]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating directory with path [datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1 {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1698.106037] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3dc85be-3e8c-4490-8f7c-18f5d907ea8f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.134850] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Created directory with path [datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1 {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1698.135123] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_55374440-0410-48b9-a26e-56806882d380/OSTACK_IMG_55374440-0410-48b9-a26e-56806882d380.vmdk to [datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1/6d9483fb-3122-4c2b-800b-dca528822bb1.vmdk. {{(pid=63297) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1698.135835] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c2ed6ed8-d754-4f8e-8966-e4cd279c0c89 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.143127] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1698.143127] env[63297]: value = "task-1698197" [ 1698.143127] env[63297]: _type = "Task" [ 1698.143127] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.152277] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698197, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.232722] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1698.233019] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1698.233213] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Deleting the datastore file [datastore1] 66b7a1e5-5e74-49db-99f3-4427d7297bf2 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1698.233478] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce50bc26-8cf6-48d8-a070-d6969f07c59d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.240166] env[63297]: DEBUG oslo_vmware.api [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1698.240166] env[63297]: value = "task-1698198" [ 1698.240166] env[63297]: _type = "Task" [ 1698.240166] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.250858] env[63297]: DEBUG oslo_vmware.api [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698198, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.345053] env[63297]: INFO nova.compute.manager [-] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Took 1.56 seconds to deallocate network for instance. 
[ 1698.349723] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.350290] env[63297]: DEBUG nova.compute.manager [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Instance network_info: |[{"id": "c31a25f5-7d02-427f-932a-464daf59e755", "address": "fa:16:3e:a0:49:93", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc31a25f5-7d", "ovs_interfaceid": "c31a25f5-7d02-427f-932a-464daf59e755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1698.350655] env[63297]: DEBUG oslo_concurrency.lockutils [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] Acquired lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1698.350919] env[63297]: DEBUG nova.network.neutron [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Refreshing network info cache for port c31a25f5-7d02-427f-932a-464daf59e755 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1698.352110] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:49:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e55c248-c504-4c7a-bbe9-f42cf417aee7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c31a25f5-7d02-427f-932a-464daf59e755', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1698.360050] env[63297]: DEBUG oslo.service.loopingcall [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1698.361970] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1698.361970] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64c142dc-a46f-4f40-8483-f3b71aff0a43 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.382412] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1698.382412] env[63297]: value = "task-1698199" [ 1698.382412] env[63297]: _type = "Task" [ 1698.382412] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.392077] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698199, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.444165] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "refresh_cache-66b7a1e5-5e74-49db-99f3-4427d7297bf2" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.599648] env[63297]: DEBUG nova.scheduler.client.report [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1698.626283] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.626283] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.626283] env[63297]: INFO nova.compute.manager [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Rebooting instance [ 1698.656337] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 
tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698197, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.756147] env[63297]: DEBUG oslo_vmware.api [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698198, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.864716] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.894333] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698199, 'name': CreateVM_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.948453] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3685614c-c530-4246-8448-7ab4c39df0c1 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-66b7a1e5-5e74-49db-99f3-4427d7297bf2-a5af9937-1640-481b-b998-9090c09fa6e0" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.631s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.965201] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "d7dc0672-c908-418e-bfcb-8daa761fba37" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.965557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "d7dc0672-c908-418e-bfcb-8daa761fba37" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.965814] env[63297]: DEBUG nova.compute.manager [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Going to confirm migration 5 {{(pid=63297) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1698.984181] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "refresh_cache-765f3232-f3f9-4d9b-92f2-fb6603f2a90a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1698.984181] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquired lock "refresh_cache-765f3232-f3f9-4d9b-92f2-fb6603f2a90a" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1698.984181] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Forcefully refreshing network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1699.092342] env[63297]: DEBUG nova.network.neutron [req-4edbe0f9-b1cb-4a67-946c-80fcfec3916c req-7ad3b15e-8c79-4efa-9ea5-66ab24c03250 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Updated VIF entry in instance network info cache for port 54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1699.092838] env[63297]: DEBUG nova.network.neutron [req-4edbe0f9-b1cb-4a67-946c-80fcfec3916c req-7ad3b15e-8c79-4efa-9ea5-66ab24c03250 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Updating instance_info_cache with network_info: [{"id": "54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d", "address": "fa:16:3e:7a:3c:bc", "network": {"id": "b4d7c5a5-3ce8-4260-adec-a4bfc26133b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1616270477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d636a91a492a4f538bc2fc8634f5fa14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b8e5d7-42", "ovs_interfaceid": "54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.104840] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.946s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.107465] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.596s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.107757] env[63297]: DEBUG nova.objects.instance [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lazy-loading 'resources' on Instance uuid 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1699.131634] env[63297]: INFO nova.scheduler.client.report 
[None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted allocations for instance 314c2cd3-6f1d-4d74-ad84-d7cc44375456 [ 1699.154911] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698197, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.156201] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1699.173812] env[63297]: DEBUG nova.network.neutron [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updated VIF entry in instance network info cache for port c31a25f5-7d02-427f-932a-464daf59e755. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1699.174284] env[63297]: DEBUG nova.network.neutron [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance_info_cache with network_info: [{"id": "c31a25f5-7d02-427f-932a-464daf59e755", "address": "fa:16:3e:a0:49:93", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc31a25f5-7d", "ovs_interfaceid": "c31a25f5-7d02-427f-932a-464daf59e755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.249560] env[63297]: DEBUG nova.compute.manager [req-839f74a5-fe1a-47a9-b779-35060b2b97c9 req-4d917f08-bd68-488b-928c-7f5dd733dd15 service nova] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Received event network-vif-deleted-8f272d86-3373-42d6-8f0d-94e83e8e6b2c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1699.255810] env[63297]: DEBUG oslo_vmware.api [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698198, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.983601} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.256108] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1699.256259] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1699.256463] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1699.256594] env[63297]: INFO nova.compute.manager [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1699.256832] env[63297]: DEBUG oslo.service.loopingcall [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1699.257035] env[63297]: DEBUG nova.compute.manager [-] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1699.257133] env[63297]: DEBUG nova.network.neutron [-] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1699.317191] env[63297]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port bbed31c2-2385-4195-90af-10f04ce61f2f could not be found.", "detail": ""}} {{(pid=63297) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1699.317503] env[63297]: DEBUG nova.network.neutron [-] Unable to show port bbed31c2-2385-4195-90af-10f04ce61f2f as it no longer exists. {{(pid=63297) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1699.394824] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698199, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.527180] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1699.527792] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1699.530066] env[63297]: DEBUG nova.network.neutron [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1699.530066] env[63297]: DEBUG nova.objects.instance [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lazy-loading 'info_cache' on Instance uuid d7dc0672-c908-418e-bfcb-8daa761fba37 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1699.561795] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2e9ff9-e88d-4fd3-88c9-76858cc8e7d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.586424] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance 'ffaa1402-5b51-4393-82c7-d9db964edfd3' progress to 0 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1699.595820] env[63297]: DEBUG oslo_concurrency.lockutils [req-4edbe0f9-b1cb-4a67-946c-80fcfec3916c req-7ad3b15e-8c79-4efa-9ea5-66ab24c03250 service nova] Releasing lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1699.596048] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquired lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1699.596237] env[63297]: DEBUG nova.network.neutron [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1699.641075] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b2170d9e-7476-48d8-809d-7a5cfd3af90c tempest-ServersTestJSON-1972465365 
tempest-ServersTestJSON-1972465365-project-member] Lock "314c2cd3-6f1d-4d74-ad84-d7cc44375456" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.480s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.659328] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698197, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.677167] env[63297]: DEBUG oslo_concurrency.lockutils [req-1d76c023-156c-43b1-a1ec-06ba25012355 req-3e5f567f-6cd0-4573-8f59-45d86542c0ac service nova] Releasing lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1699.882596] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973342fa-dc3c-436a-9337-51cc3185721b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.899129] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9455a794-80cc-4a16-bc7a-5220fdb2da35 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.902985] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698199, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.931623] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e470846c-d131-4087-b830-190a3a3fad2f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.940126] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66fbfee-5fec-4df4-b955-9c27f04dea88 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.955020] env[63297]: DEBUG nova.compute.provider_tree [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1700.093667] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1700.094019] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10ff4e4d-8270-41ac-85e0-7699641e7cb0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.105245] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: 
(returnval){ [ 1700.105245] env[63297]: value = "task-1698200" [ 1700.105245] env[63297]: _type = "Task" [ 1700.105245] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.118539] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698200, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.160895] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698197, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.227728] env[63297]: DEBUG nova.compute.manager [req-4c166d00-b2d1-4b44-b6de-f41163b4d3a7 req-1da50151-0d91-4173-8717-2091e56b67cf service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Received event network-vif-deleted-6e95dd8f-a3a6-4449-a572-aba4792afffe {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1700.227969] env[63297]: INFO nova.compute.manager [req-4c166d00-b2d1-4b44-b6de-f41163b4d3a7 req-1da50151-0d91-4173-8717-2091e56b67cf service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Neutron deleted interface 6e95dd8f-a3a6-4449-a572-aba4792afffe; detaching it from the instance and deleting it from the info cache [ 1700.228191] env[63297]: DEBUG nova.network.neutron [req-4c166d00-b2d1-4b44-b6de-f41163b4d3a7 req-1da50151-0d91-4173-8717-2091e56b67cf service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.242071] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "765f3232-f3f9-4d9b-92f2-fb6603f2a90a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.242453] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "765f3232-f3f9-4d9b-92f2-fb6603f2a90a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.242709] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "765f3232-f3f9-4d9b-92f2-fb6603f2a90a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.243021] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock 
"765f3232-f3f9-4d9b-92f2-fb6603f2a90a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.243214] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "765f3232-f3f9-4d9b-92f2-fb6603f2a90a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.245844] env[63297]: INFO nova.compute.manager [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Terminating instance [ 1700.248094] env[63297]: DEBUG nova.compute.manager [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1700.248303] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1700.249529] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0b1694-4ada-440c-a2e1-a509c9b054e6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.263868] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1700.264161] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39ec331b-4ed2-451c-92cf-a59b73e292fa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.271527] env[63297]: DEBUG oslo_vmware.api [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1700.271527] env[63297]: value = "task-1698201" [ 1700.271527] env[63297]: _type = "Task" [ 1700.271527] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.286312] env[63297]: DEBUG oslo_vmware.api [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698201, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.340958] env[63297]: DEBUG nova.network.neutron [-] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.391443] env[63297]: DEBUG nova.network.neutron [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Updating instance_info_cache with network_info: [{"id": "54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d", "address": "fa:16:3e:7a:3c:bc", "network": {"id": "b4d7c5a5-3ce8-4260-adec-a4bfc26133b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1616270477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d636a91a492a4f538bc2fc8634f5fa14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b8e5d7-42", "ovs_interfaceid": "54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.401738] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698199, 'name': CreateVM_Task, 'duration_secs': 1.607748} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.403944] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1700.403944] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1700.403944] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1700.403944] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1700.403944] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-461eb7cc-2340-4826-a1e5-feb9b0448727 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.411554] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1700.411554] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5229d9ec-e905-8ec5-7396-54d6b9ad4b59" [ 1700.411554] env[63297]: _type = "Task" [ 1700.411554] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.420793] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5229d9ec-e905-8ec5-7396-54d6b9ad4b59, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.422086] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Updating instance_info_cache with network_info: [{"id": "2d2fd3e6-5139-447f-b482-8492601c35f3", "address": "fa:16:3e:51:68:d4", "network": {"id": "e6a7cdc5-1c82-4ca0-b706-7a1271a32962", "bridge": "br-int", "label": "tempest-ServersTestJSON-818015886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33733e0599840618625ecb3e6bb6029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d2fd3e6-51", "ovs_interfaceid": "2d2fd3e6-5139-447f-b482-8492601c35f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.458376] env[63297]: DEBUG nova.scheduler.client.report [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1700.616612] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698200, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.659895] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698197, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.733792] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-390e14c8-db20-4037-b22d-85a66e94f45f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.745478] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149e899c-196b-4ad6-a623-ebefecf68a08 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.781014] env[63297]: DEBUG nova.compute.manager [req-4c166d00-b2d1-4b44-b6de-f41163b4d3a7 req-1da50151-0d91-4173-8717-2091e56b67cf service nova] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Detach interface failed, port_id=6e95dd8f-a3a6-4449-a572-aba4792afffe, reason: Instance 66b7a1e5-5e74-49db-99f3-4427d7297bf2 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1700.791503] env[63297]: DEBUG oslo_vmware.api [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698201, 'name': PowerOffVM_Task, 'duration_secs': 0.31901} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.791804] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1700.791998] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1700.792327] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88aaf65b-09d7-48a4-b7fb-1ab61a394451 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.810785] env[63297]: DEBUG nova.network.neutron [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance_info_cache with network_info: [{"id": "51feb81a-d695-4671-800d-b58470af4ae2", "address": "fa:16:3e:55:69:f5", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", 
"external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51feb81a-d6", "ovs_interfaceid": "51feb81a-d695-4671-800d-b58470af4ae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.843330] env[63297]: INFO nova.compute.manager [-] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Took 1.59 seconds to deallocate network for instance. [ 1700.876031] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1700.876199] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1700.876782] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleting the datastore file [datastore1] 765f3232-f3f9-4d9b-92f2-fb6603f2a90a {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1700.876782] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd9ed4a3-00a7-4c0e-8556-190aa4f6c3c5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.884717] env[63297]: DEBUG oslo_vmware.api [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for the task: (returnval){ [ 1700.884717] env[63297]: value = "task-1698203" [ 1700.884717] env[63297]: _type = "Task" [ 1700.884717] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.894873] env[63297]: DEBUG oslo_vmware.api [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698203, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.895420] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Releasing lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.897461] env[63297]: DEBUG nova.compute.manager [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1700.898311] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1a4165-4f51-4b10-a965-9b9ebc121995 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.921944] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5229d9ec-e905-8ec5-7396-54d6b9ad4b59, 'name': SearchDatastore_Task, 'duration_secs': 0.089762} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.922307] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.922637] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1700.922942] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1700.923144] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1700.924019] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1700.924509] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Releasing lock "refresh_cache-765f3232-f3f9-4d9b-92f2-fb6603f2a90a" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.924680] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Updated the network info_cache for instance {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1700.924876] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a703d187-21a7-46b1-94e0-9c870e390667 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.926777] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.927319] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.927879] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.928064] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.928400] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.928596] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.928758] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1700.928913] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.943416] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1700.943607] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1700.945065] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a623bdc1-b509-425b-92d0-f07e8a617d06 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.950990] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1700.950990] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52446112-3398-5a6e-fd98-95a16ba8e528" [ 1700.950990] env[63297]: _type = "Task" [ 1700.950990] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.959646] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52446112-3398-5a6e-fd98-95a16ba8e528, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.963609] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.856s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.965939] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.101s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.966194] env[63297]: DEBUG nova.objects.instance [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lazy-loading 'resources' on Instance uuid 1d8c6df5-069f-4647-a2f6-e69a4bf8be94 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1700.990687] env[63297]: INFO nova.scheduler.client.report [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Deleted allocations for instance 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f [ 1701.116562] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698200, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.157297] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698197, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.314204] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "refresh_cache-d7dc0672-c908-418e-bfcb-8daa761fba37" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1701.314490] env[63297]: DEBUG nova.objects.instance [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lazy-loading 'migration_context' on Instance uuid d7dc0672-c908-418e-bfcb-8daa761fba37 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1701.350100] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.396147] env[63297]: DEBUG oslo_vmware.api [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698203, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.432960] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.461504] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52446112-3398-5a6e-fd98-95a16ba8e528, 'name': SearchDatastore_Task, 'duration_secs': 0.087513} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.462260] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-571fe4ea-ee04-453f-90d3-97a36b4ba431 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.467588] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1701.467588] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]525281c8-0a9a-c464-f05c-89b7116bf868" [ 1701.467588] env[63297]: _type = "Task" [ 1701.467588] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.478542] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525281c8-0a9a-c464-f05c-89b7116bf868, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.497798] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7b1a3add-1fa9-4ce6-a6d7-51394428bcd3 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.657s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.618809] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698200, 'name': PowerOffVM_Task, 'duration_secs': 1.12169} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.619619] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1701.621209] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance 'ffaa1402-5b51-4393-82c7-d9db964edfd3' progress to 17 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1701.659195] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698197, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.109818} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.661097] env[63297]: INFO nova.virt.vmwareapi.ds_util [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_55374440-0410-48b9-a26e-56806882d380/OSTACK_IMG_55374440-0410-48b9-a26e-56806882d380.vmdk to [datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1/6d9483fb-3122-4c2b-800b-dca528822bb1.vmdk. 
[ 1701.661380] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Cleaning up location [datastore1] OSTACK_IMG_55374440-0410-48b9-a26e-56806882d380 {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1701.661433] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_55374440-0410-48b9-a26e-56806882d380 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1701.661882] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdfa4fb6-bb35-4abc-a210-92e84d9a3e0c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.669290] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1701.669290] env[63297]: value = "task-1698204" [ 1701.669290] env[63297]: _type = "Task" [ 1701.669290] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.682624] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698204, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.717562] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1525635c-d7ad-466e-9f24-7d8cc7124a64 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.725081] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9551ada-3395-4c05-a988-095ad4cc0ea5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.759805] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f74f47-03a3-4087-8f38-08b806fbaa8a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.766391] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ea7080-6a73-4542-9188-a9274929ab61 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.780729] env[63297]: DEBUG nova.compute.provider_tree [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1701.817712] env[63297]: DEBUG nova.objects.base [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1701.818784] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c99362-0225-46e1-bc05-3d5f652daaf7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.842582] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0d2151b-2b05-429c-a7f8-5fbdb70b1895 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.848790] env[63297]: DEBUG oslo_vmware.api [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1701.848790] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528ff80e-8f21-ca22-3332-b399946f2c99" [ 1701.848790] env[63297]: _type = "Task" [ 1701.848790] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.857250] env[63297]: DEBUG oslo_vmware.api [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528ff80e-8f21-ca22-3332-b399946f2c99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.893842] env[63297]: DEBUG oslo_vmware.api [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Task: {'id': task-1698203, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.896565} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.894166] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1701.894370] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1701.894548] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1701.894760] env[63297]: INFO nova.compute.manager [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1701.895109] env[63297]: DEBUG oslo.service.loopingcall [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1701.895241] env[63297]: DEBUG nova.compute.manager [-] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1701.895339] env[63297]: DEBUG nova.network.neutron [-] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1701.913079] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48b4e25-1e1a-4a87-8123-d93b36c4a5e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.919647] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Doing hard reboot of VM {{(pid=63297) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1701.919886] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-06613305-4656-445b-b5b2-38509e83425a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.926188] env[63297]: DEBUG oslo_vmware.api [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1701.926188] env[63297]: value = "task-1698205" [ 1701.926188] env[63297]: _type = "Task" [ 1701.926188] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.935894] env[63297]: DEBUG oslo_vmware.api [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698205, 'name': ResetVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.977932] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525281c8-0a9a-c464-f05c-89b7116bf868, 'name': SearchDatastore_Task, 'duration_secs': 0.073897} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.978477] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1701.978579] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d/6a99c537-e882-4c8c-b7c3-0861a5c0dc0d.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1701.978812] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5aa4c50e-bfe5-445d-bb9b-a8fa0000338f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.984624] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1701.984624] env[63297]: value = "task-1698206" [ 1701.984624] env[63297]: _type = "Task" [ 1701.984624] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.992545] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698206, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.127318] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1702.127597] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1702.127734] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1702.127931] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1702.128092] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1702.128240] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1702.128445] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1702.128603] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1702.130916] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Got 1 possible topologies 
{{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1702.130916] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1702.130916] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1702.135412] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee766798-48ce-4a6a-aaca-51f780d6be96 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.153073] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1702.153073] env[63297]: value = "task-1698207" [ 1702.153073] env[63297]: _type = "Task" [ 1702.153073] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.162429] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698207, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.179173] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698204, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149663} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.179403] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1702.179605] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1/6d9483fb-3122-4c2b-800b-dca528822bb1.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.180209] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1/6d9483fb-3122-4c2b-800b-dca528822bb1.vmdk to [datastore1] 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1/427c4ff0-1bf1-4bfb-b5c6-de6659148ab1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1702.180209] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d36856b-208c-432f-8308-cd3015a46a7b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.187415] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1702.187415] env[63297]: value = "task-1698208" [ 1702.187415] env[63297]: _type = "Task" [ 1702.187415] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.195068] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698208, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.251933] env[63297]: DEBUG nova.compute.manager [req-5cee6378-f7aa-45cb-b514-207e30076068 req-d1b6c91b-d49b-41f1-97ec-70ae0107280b service nova] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Received event network-vif-deleted-2d2fd3e6-5139-447f-b482-8492601c35f3 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1702.252157] env[63297]: INFO nova.compute.manager [req-5cee6378-f7aa-45cb-b514-207e30076068 req-d1b6c91b-d49b-41f1-97ec-70ae0107280b service nova] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Neutron deleted interface 2d2fd3e6-5139-447f-b482-8492601c35f3; detaching it from the instance and deleting it from the info cache [ 1702.252332] env[63297]: DEBUG nova.network.neutron [req-5cee6378-f7aa-45cb-b514-207e30076068 req-d1b6c91b-d49b-41f1-97ec-70ae0107280b service nova] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.284194] env[63297]: DEBUG nova.scheduler.client.report [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1702.362081] env[63297]: DEBUG oslo_vmware.api [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528ff80e-8f21-ca22-3332-b399946f2c99, 'name': SearchDatastore_Task, 'duration_secs': 0.021542} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.362081] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.438069] env[63297]: DEBUG oslo_vmware.api [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698205, 'name': ResetVM_Task, 'duration_secs': 0.143979} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.438399] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Did hard reboot of VM {{(pid=63297) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1702.438592] env[63297]: DEBUG nova.compute.manager [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1702.439624] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35aca17-184b-4c68-b69d-2a6cee391375 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.495964] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698206, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.668371] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698207, 'name': ReconfigVM_Task, 'duration_secs': 0.265944} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.668998] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance 'ffaa1402-5b51-4393-82c7-d9db964edfd3' progress to 33 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1702.700617] env[63297]: DEBUG nova.network.neutron [-] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.702269] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698208, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.754976] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3caa0749-e995-45e4-83ba-74fa34a1bc28 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.765399] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f3f606-278d-43ce-bc3c-dd7675ac1e31 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.799228] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.833s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.801289] env[63297]: DEBUG nova.compute.manager [req-5cee6378-f7aa-45cb-b514-207e30076068 req-d1b6c91b-d49b-41f1-97ec-70ae0107280b service nova] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Detach interface failed, port_id=2d2fd3e6-5139-447f-b482-8492601c35f3, reason: Instance 765f3232-f3f9-4d9b-92f2-fb6603f2a90a could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1702.801973] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.452s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.802381] env[63297]: DEBUG nova.objects.instance [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'resources' on Instance uuid 66b7a1e5-5e74-49db-99f3-4427d7297bf2 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1702.835173] env[63297]: INFO nova.scheduler.client.report [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Deleted allocations for instance 1d8c6df5-069f-4647-a2f6-e69a4bf8be94 [ 1702.953334] env[63297]: DEBUG oslo_concurrency.lockutils [None req-46a68768-59a0-4f56-b1aa-81e99f3567b8 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.327s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.998461] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698206, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.737004} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.999609] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d/6a99c537-e882-4c8c-b7c3-0861a5c0dc0d.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1702.999609] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1702.999609] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-976c7c2a-0609-4a2a-b1ef-775ba6e7a300 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.008576] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1703.008576] env[63297]: value = "task-1698209" [ 1703.008576] env[63297]: _type = "Task" [ 1703.008576] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.020662] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698209, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.176931] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1703.176931] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1703.177262] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1703.177303] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1703.178033] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1703.178033] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1703.178033] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1703.178033] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1703.178227] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Got 1 possible topologies 
{{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1703.178285] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1703.178501] env[63297]: DEBUG nova.virt.hardware [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1703.184258] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Reconfiguring VM instance instance-00000068 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1703.184692] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb4a9813-9633-4bec-97a6-2c9927c03217 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.204730] env[63297]: INFO nova.compute.manager [-] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Took 1.31 seconds to deallocate network for instance. [ 1703.214883] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1703.214883] env[63297]: value = "task-1698210" [ 1703.214883] env[63297]: _type = "Task" [ 1703.214883] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.215140] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698208, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.224377] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698210, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.343325] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e2e60053-cad2-42e2-a205-4e1d833ffd40 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "1d8c6df5-069f-4647-a2f6-e69a4bf8be94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.720s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.521285] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698209, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.545800] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.546027] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.569811] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8a3ec2-d869-412e-96fc-286a4a242d97 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.579219] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c0bf2e-2330-4473-8215-8ebf09d1e544 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.616738] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db958fe-50b6-4d2b-93bb-c4544b859e69 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.625625] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b00265-28a6-4979-b0b4-83a385764c91 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.640631] env[63297]: DEBUG nova.compute.provider_tree [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1703.710285] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 
tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698208, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.720029] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.727866] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698210, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.019999] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698209, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.767115} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.020378] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1704.021218] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ce2af3-8da0-4960-a9bf-b997b92ac950 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.046625] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d/6a99c537-e882-4c8c-b7c3-0861a5c0dc0d.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1704.047025] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a201412-7e8c-4b70-8747-c8e376b83a26 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.062717] env[63297]: DEBUG nova.compute.manager [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1704.065660] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.065922] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.066203] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.066398] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.066564] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.068898] env[63297]: INFO nova.compute.manager [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Terminating instance [ 1704.070529] env[63297]: DEBUG nova.compute.manager [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1704.070724] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1704.071592] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa1960a-4122-42fc-8e62-f996922f9d43 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.076046] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1704.076046] env[63297]: value = "task-1698212" [ 1704.076046] env[63297]: _type = "Task" [ 1704.076046] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.082550] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1704.085797] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57c6d6ed-0d52-4465-8b8a-279121260ff4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.087484] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698212, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.091907] env[63297]: DEBUG oslo_vmware.api [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1704.091907] env[63297]: value = "task-1698213" [ 1704.091907] env[63297]: _type = "Task" [ 1704.091907] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.101029] env[63297]: DEBUG oslo_vmware.api [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698213, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.144211] env[63297]: DEBUG nova.scheduler.client.report [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1704.211389] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698208, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.229139] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698210, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.277747] env[63297]: DEBUG nova.compute.manager [req-ba5f6003-5968-4e8f-b931-0051ce5a7def req-b9e1f781-d029-45aa-8002-2a2ba8fdd9c5 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Received event network-changed-54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1704.277747] env[63297]: DEBUG nova.compute.manager [req-ba5f6003-5968-4e8f-b931-0051ce5a7def req-b9e1f781-d029-45aa-8002-2a2ba8fdd9c5 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Refreshing instance network info cache due to event network-changed-54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1704.277882] env[63297]: DEBUG oslo_concurrency.lockutils [req-ba5f6003-5968-4e8f-b931-0051ce5a7def req-b9e1f781-d029-45aa-8002-2a2ba8fdd9c5 service nova] Acquiring lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.277990] env[63297]: DEBUG oslo_concurrency.lockutils [req-ba5f6003-5968-4e8f-b931-0051ce5a7def req-b9e1f781-d029-45aa-8002-2a2ba8fdd9c5 service nova] Acquired lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.278260] env[63297]: DEBUG nova.network.neutron [req-ba5f6003-5968-4e8f-b931-0051ce5a7def req-b9e1f781-d029-45aa-8002-2a2ba8fdd9c5 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Refreshing network info cache for port 54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1704.586244] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698212, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.588962] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.600910] env[63297]: DEBUG oslo_vmware.api [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698213, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.649522] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.847s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.652166] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.219s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.652362] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.652523] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1704.652862] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.292s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.654955] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d2fbb9-ae12-48fd-8b36-cc437ec8b041 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.664542] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b415269a-78bf-481f-b531-558a523b3603 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.671465] env[63297]: INFO nova.scheduler.client.report [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Deleted allocations for instance 66b7a1e5-5e74-49db-99f3-4427d7297bf2 [ 1704.683564] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22eefdf5-9a3f-4ae5-bb80-07d73ef52b7c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.690976] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172b35b9-86ae-4aca-9f9e-dda2b09cfcb4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.725164] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179513MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1704.725531] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.738203] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698208, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.741393] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698210, 'name': ReconfigVM_Task, 'duration_secs': 1.254318} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.741843] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Reconfigured VM instance instance-00000068 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1704.742943] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561cb093-c811-4253-8194-d143ada84a75 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.770660] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] ffaa1402-5b51-4393-82c7-d9db964edfd3/ffaa1402-5b51-4393-82c7-d9db964edfd3.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1704.771050] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14aee95c-b76d-4f55-92e1-0fb97f72560a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.795287] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1704.795287] env[63297]: value = "task-1698214" [ 1704.795287] env[63297]: _type = "Task" [ 1704.795287] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.807381] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698214, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.084226] env[63297]: DEBUG nova.network.neutron [req-ba5f6003-5968-4e8f-b931-0051ce5a7def req-b9e1f781-d029-45aa-8002-2a2ba8fdd9c5 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Updated VIF entry in instance network info cache for port 54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1705.084603] env[63297]: DEBUG nova.network.neutron [req-ba5f6003-5968-4e8f-b931-0051ce5a7def req-b9e1f781-d029-45aa-8002-2a2ba8fdd9c5 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Updating instance_info_cache with network_info: [{"id": "54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d", "address": "fa:16:3e:7a:3c:bc", "network": {"id": "b4d7c5a5-3ce8-4260-adec-a4bfc26133b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1616270477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d636a91a492a4f538bc2fc8634f5fa14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b8e5d7-42", "ovs_interfaceid": "54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.089122] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698212, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.104188] env[63297]: DEBUG oslo_vmware.api [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698213, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.186683] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b13e3fa7-ca3d-40d1-8f37-ceb3eb9dac25 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "66b7a1e5-5e74-49db-99f3-4427d7297bf2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.647s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.237979] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698208, 'name': CopyVirtualDisk_Task} progress is 97%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.305495] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698214, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.372398] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64696fdc-6247-459e-a5d7-2f7e3d8295e4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.380163] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e785822c-7599-4382-b5e1-efd986c3e060 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.414053] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee47c8d-de48-44ef-aacb-dba95013deb5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.421594] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a397923-0e85-405a-a102-091564c05642 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.434668] env[63297]: DEBUG nova.compute.provider_tree [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1705.590912] env[63297]: DEBUG oslo_concurrency.lockutils [req-ba5f6003-5968-4e8f-b931-0051ce5a7def req-b9e1f781-d029-45aa-8002-2a2ba8fdd9c5 service nova] Releasing lock "refresh_cache-5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.591327] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698212, 'name': ReconfigVM_Task, 'duration_secs': 1.402565} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.592089] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d/6a99c537-e882-4c8c-b7c3-0861a5c0dc0d.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1705.592241] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ec4cafa-9d7c-43c0-a242-3221b5e05654 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.604980] env[63297]: DEBUG oslo_vmware.api [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698213, 'name': PowerOffVM_Task, 'duration_secs': 1.053393} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.606247] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1705.606443] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1705.606744] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1705.606744] env[63297]: value = "task-1698215" [ 1705.606744] env[63297]: _type = "Task" [ 1705.606744] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.606930] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-586469af-5650-4de6-973b-ee95eb5f5f07 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.616178] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698215, 'name': Rename_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.688879] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1705.689163] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1705.689358] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Deleting the datastore file [datastore1] 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1705.689632] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b85cfff-0296-4343-b786-69a85797e1f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.695992] env[63297]: DEBUG oslo_vmware.api [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1705.695992] env[63297]: value = "task-1698217" [ 1705.695992] env[63297]: _type = "Task" [ 1705.695992] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.704161] env[63297]: DEBUG oslo_vmware.api [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698217, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.736682] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698208, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.090125} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.736990] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6d9483fb-3122-4c2b-800b-dca528822bb1/6d9483fb-3122-4c2b-800b-dca528822bb1.vmdk to [datastore1] 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1/427c4ff0-1bf1-4bfb-b5c6-de6659148ab1.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1705.737791] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618f3ea7-453f-4018-a4b0-a58365d533e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.761796] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1/427c4ff0-1bf1-4bfb-b5c6-de6659148ab1.vmdk or device None with type streamOptimized {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1705.762098] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69bf8fd6-d95e-4b2b-a3f6-a10495fd7a4b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.780562] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1705.780562] env[63297]: value = "task-1698218" [ 1705.780562] env[63297]: _type = "Task" [ 1705.780562] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.788183] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698218, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.805477] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698214, 'name': ReconfigVM_Task, 'duration_secs': 0.757285} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.805986] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Reconfigured VM instance instance-00000068 to attach disk [datastore1] ffaa1402-5b51-4393-82c7-d9db964edfd3/ffaa1402-5b51-4393-82c7-d9db964edfd3.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1705.806144] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance 'ffaa1402-5b51-4393-82c7-d9db964edfd3' progress to 50 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1705.938565] env[63297]: DEBUG nova.scheduler.client.report [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1706.117676] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698215, 'name': Rename_Task, 'duration_secs': 0.17191} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.118412] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1706.120670] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ad7d6fa-e0f3-4825-8fb1-c85234d0fa2c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.125615] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1706.125615] env[63297]: value = "task-1698219" [ 1706.125615] env[63297]: _type = "Task" [ 1706.125615] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.134241] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698219, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.206069] env[63297]: DEBUG oslo_vmware.api [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179765} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.206069] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1706.206158] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1706.206291] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1706.206452] env[63297]: INFO nova.compute.manager [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Took 2.14 seconds to destroy the instance on the hypervisor. [ 1706.206697] env[63297]: DEBUG oslo.service.loopingcall [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1706.206895] env[63297]: DEBUG nova.compute.manager [-] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1706.206991] env[63297]: DEBUG nova.network.neutron [-] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1706.290941] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698218, 'name': ReconfigVM_Task, 'duration_secs': 0.307} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.292089] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1/427c4ff0-1bf1-4bfb-b5c6-de6659148ab1.vmdk or device None with type streamOptimized {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1706.292674] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_secret_uuid': None, 'disk_bus': None, 'encryption_format': None, 'device_name': '/dev/sda', 'size': 0, 'encrypted': False, 'guest_format': None, 'device_type': 'disk', 'boot_index': 0, 'encryption_options': None, 'image_id': '41f1ad71-37f2-4e86-a900-da4965eba44f'}], 'ephemerals': [], 'block_device_mapping': [{'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354007', 'volume_id': '4890cca6-2688-4f8a-89b5-29f10002cfe8', 'name': 'volume-4890cca6-2688-4f8a-89b5-29f10002cfe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '427c4ff0-1bf1-4bfb-b5c6-de6659148ab1', 'attached_at': '', 'detached_at': '', 'volume_id': '4890cca6-2688-4f8a-89b5-29f10002cfe8', 'serial': '4890cca6-2688-4f8a-89b5-29f10002cfe8'}, 'mount_device': '/dev/sdb', 'disk_bus': None, 'attachment_id': '488b7a58-aceb-4693-a62e-0b9229c954f9', 'guest_format': None, 'device_type': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=63297) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1706.292926] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Volume attach. 
Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1706.293094] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354007', 'volume_id': '4890cca6-2688-4f8a-89b5-29f10002cfe8', 'name': 'volume-4890cca6-2688-4f8a-89b5-29f10002cfe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '427c4ff0-1bf1-4bfb-b5c6-de6659148ab1', 'attached_at': '', 'detached_at': '', 'volume_id': '4890cca6-2688-4f8a-89b5-29f10002cfe8', 'serial': '4890cca6-2688-4f8a-89b5-29f10002cfe8'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1706.293957] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3214a65f-9437-4792-8b8e-ee19ead241b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.309514] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3bd3da-2306-477c-9925-04170a9efefe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.314630] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef86122-45b8-4da2-9584-4164fe232372 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.345291] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37187f12-cfcb-453d-a697-f4f79349bbab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.355670] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] volume-4890cca6-2688-4f8a-89b5-29f10002cfe8/volume-4890cca6-2688-4f8a-89b5-29f10002cfe8.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1706.355987] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec9c461a-dce8-4e91-8f42-1110a9dd4a63 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.385983] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance 'ffaa1402-5b51-4393-82c7-d9db964edfd3' progress to 67 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1706.390289] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1706.390289] env[63297]: 
value = "task-1698220" [ 1706.390289] env[63297]: _type = "Task" [ 1706.390289] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.400028] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698220, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.436467] env[63297]: DEBUG nova.compute.manager [req-85459f5e-27e8-497f-b475-f2cc5bdc11a6 req-6aa9146e-8228-4075-b57c-c6845012dcc3 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Received event network-vif-deleted-54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1706.436665] env[63297]: INFO nova.compute.manager [req-85459f5e-27e8-497f-b475-f2cc5bdc11a6 req-6aa9146e-8228-4075-b57c-c6845012dcc3 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Neutron deleted interface 54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d; detaching it from the instance and deleting it from the info cache [ 1706.436839] env[63297]: DEBUG nova.network.neutron [req-85459f5e-27e8-497f-b475-f2cc5bdc11a6 req-6aa9146e-8228-4075-b57c-c6845012dcc3 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.636490] env[63297]: DEBUG oslo_vmware.api [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698219, 'name': PowerOnVM_Task, 'duration_secs': 0.496277} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.636744] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1706.637057] env[63297]: INFO nova.compute.manager [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Took 10.82 seconds to spawn the instance on the hypervisor. 
[ 1706.637294] env[63297]: DEBUG nova.compute.manager [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1706.638079] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fab7e8-9353-4c2a-94a5-b5d37dff2231 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.903892] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698220, 'name': ReconfigVM_Task, 'duration_secs': 0.314106} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.904254] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Reconfigured VM instance instance-0000005e to attach disk [datastore1] volume-4890cca6-2688-4f8a-89b5-29f10002cfe8/volume-4890cca6-2688-4f8a-89b5-29f10002cfe8.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1706.908866] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbbf2757-464a-4551-93e2-27c3df4f3de9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.918527] env[63297]: DEBUG nova.network.neutron [-] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.925286] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1706.925286] env[63297]: value = "task-1698221" [ 1706.925286] env[63297]: _type = "Task" [ 1706.925286] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.934339] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698221, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.938834] env[63297]: DEBUG nova.network.neutron [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Port dc0af285-6a18-4cb7-b669-1b9a78865789 binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1706.940489] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9db7c2c-e1a5-4604-b68e-a064c7692196 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.948989] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f783dfd-ddcb-490f-a4be-af38d84f7abb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.961938] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.309s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.965785] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.248s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.966056] env[63297]: DEBUG nova.objects.instance [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lazy-loading 'resources' on Instance uuid 765f3232-f3f9-4d9b-92f2-fb6603f2a90a {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1706.990831] env[63297]: DEBUG nova.compute.manager [req-85459f5e-27e8-497f-b475-f2cc5bdc11a6 req-6aa9146e-8228-4075-b57c-c6845012dcc3 service nova] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Detach interface failed, port_id=54b8e5d7-4206-49f4-a5bf-e0405d6a8d9d, reason: Instance 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1707.112201] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.112298] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.157070] env[63297]: INFO nova.compute.manager [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Took 18.68 seconds to build instance. [ 1707.421155] env[63297]: INFO nova.compute.manager [-] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Took 1.21 seconds to deallocate network for instance. [ 1707.434760] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698221, 'name': ReconfigVM_Task, 'duration_secs': 0.152854} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.435054] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354007', 'volume_id': '4890cca6-2688-4f8a-89b5-29f10002cfe8', 'name': 'volume-4890cca6-2688-4f8a-89b5-29f10002cfe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '427c4ff0-1bf1-4bfb-b5c6-de6659148ab1', 'attached_at': '', 'detached_at': '', 'volume_id': '4890cca6-2688-4f8a-89b5-29f10002cfe8', 'serial': '4890cca6-2688-4f8a-89b5-29f10002cfe8'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1707.435596] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e27f0c86-5aeb-454c-9cda-27a6a3310746 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.441449] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1707.441449] env[63297]: value = "task-1698222" [ 1707.441449] env[63297]: _type = "Task" [ 1707.441449] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.459103] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698222, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.526103] env[63297]: INFO nova.scheduler.client.report [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted allocation for migration 51930a5f-a62a-41e1-9334-d43d3d7caf8f [ 1707.614552] env[63297]: DEBUG nova.compute.manager [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1707.659030] env[63297]: DEBUG oslo_concurrency.lockutils [None req-17d5b118-c52b-4c86-a7bf-236c9ef7f49a tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.190s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.706735] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b401296b-04d8-4c0c-8adb-115498d335ba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.714337] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4dd36f-c2f4-4bc7-9e9e-f5c089f57b85 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.719546] env[63297]: DEBUG nova.compute.manager [req-4e12120d-955f-41ac-bdc6-96342b20b3ff req-0c7f0a51-1fa0-4e50-bf78-3d3a5d7f8be0 service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Received event network-changed-c31a25f5-7d02-427f-932a-464daf59e755 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1707.719743] env[63297]: DEBUG nova.compute.manager [req-4e12120d-955f-41ac-bdc6-96342b20b3ff req-0c7f0a51-1fa0-4e50-bf78-3d3a5d7f8be0 service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Refreshing instance network info cache due to event network-changed-c31a25f5-7d02-427f-932a-464daf59e755. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1707.719978] env[63297]: DEBUG oslo_concurrency.lockutils [req-4e12120d-955f-41ac-bdc6-96342b20b3ff req-0c7f0a51-1fa0-4e50-bf78-3d3a5d7f8be0 service nova] Acquiring lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.720157] env[63297]: DEBUG oslo_concurrency.lockutils [req-4e12120d-955f-41ac-bdc6-96342b20b3ff req-0c7f0a51-1fa0-4e50-bf78-3d3a5d7f8be0 service nova] Acquired lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.720268] env[63297]: DEBUG nova.network.neutron [req-4e12120d-955f-41ac-bdc6-96342b20b3ff req-0c7f0a51-1fa0-4e50-bf78-3d3a5d7f8be0 service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Refreshing network info cache for port c31a25f5-7d02-427f-932a-464daf59e755 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1707.751299] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165585ea-e32e-4235-9abe-4e91271a5fef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.762704] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2b3dfc-ef07-43b7-a59c-6e102d8d7893 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.774459] env[63297]: DEBUG nova.compute.provider_tree [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1707.931332] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.959351] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698222, 'name': Rename_Task, 'duration_secs': 0.158547} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.961489] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1707.962731] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f421f6b9-b54b-4a0d-8c4a-fa32fa430dbe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.968187] env[63297]: DEBUG oslo_concurrency.lockutils [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "ffaa1402-5b51-4393-82c7-d9db964edfd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.968404] env[63297]: DEBUG oslo_concurrency.lockutils [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "ffaa1402-5b51-4393-82c7-d9db964edfd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.968579] env[63297]: DEBUG oslo_concurrency.lockutils [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "ffaa1402-5b51-4393-82c7-d9db964edfd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.975228] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1707.975228] env[63297]: value = "task-1698223" [ 1707.975228] env[63297]: _type = "Task" [ 1707.975228] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.985704] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698223, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.035881] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5093841f-67ad-4ebc-aa90-efe5a54d3e2d tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "d7dc0672-c908-418e-bfcb-8daa761fba37" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.070s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1708.137154] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.279995] env[63297]: DEBUG nova.scheduler.client.report [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1708.284321] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.286033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1708.286033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.286033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1708.286033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1708.287835] env[63297]: INFO nova.compute.manager [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Terminating instance [ 1708.290659] env[63297]: DEBUG nova.compute.manager [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1708.290849] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1708.291699] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2723da-bb9a-4a61-8c90-b039df01513a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.039375] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "d7dc0672-c908-418e-bfcb-8daa761fba37" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.039375] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "d7dc0672-c908-418e-bfcb-8daa761fba37" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.039863] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "d7dc0672-c908-418e-bfcb-8daa761fba37-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.039863] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "d7dc0672-c908-418e-bfcb-8daa761fba37-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.039863] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "d7dc0672-c908-418e-bfcb-8daa761fba37-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.044160] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.077s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.044968] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1709.050926] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.462s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.052427] env[63297]: INFO nova.compute.claims [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1709.055022] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ad9b091-8462-4b35-954b-dd6d4092483b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.056866] env[63297]: INFO nova.compute.manager [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Terminating instance [ 1709.059398] env[63297]: DEBUG nova.compute.manager [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1709.059589] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1709.061031] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.061556] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.063087] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504f2a26-fbb1-4e65-b201-111e872d87e0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.069284] env[63297]: DEBUG oslo_vmware.api [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698223, 'name': PowerOnVM_Task, 'duration_secs': 0.651995} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.070880] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1709.072737] env[63297]: DEBUG oslo_vmware.api [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1709.072737] env[63297]: value = "task-1698224" [ 1709.072737] env[63297]: _type = "Task" [ 1709.072737] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.075974] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1709.076879] env[63297]: INFO nova.scheduler.client.report [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Deleted allocations for instance 765f3232-f3f9-4d9b-92f2-fb6603f2a90a [ 1709.081067] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-944ba3a1-380b-4c1d-998c-eefe3a4ec86b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.090672] env[63297]: DEBUG oslo_vmware.api [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.092139] env[63297]: DEBUG oslo_vmware.api [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1709.092139] env[63297]: value = "task-1698225" [ 1709.092139] env[63297]: _type = "Task" [ 1709.092139] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.101180] env[63297]: DEBUG oslo_vmware.api [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698225, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.195924] env[63297]: DEBUG nova.compute.manager [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1709.199342] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c80ebd-8bac-468b-a4f4-fad52fd1ab1b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.202088] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "37a4719c-20b4-4cb3-b8fc-bfa28b906799" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.202329] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "37a4719c-20b4-4cb3-b8fc-bfa28b906799" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.241874] env[63297]: DEBUG nova.network.neutron [req-4e12120d-955f-41ac-bdc6-96342b20b3ff req-0c7f0a51-1fa0-4e50-bf78-3d3a5d7f8be0 service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updated VIF entry in instance network info cache for port c31a25f5-7d02-427f-932a-464daf59e755. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1709.242344] env[63297]: DEBUG nova.network.neutron [req-4e12120d-955f-41ac-bdc6-96342b20b3ff req-0c7f0a51-1fa0-4e50-bf78-3d3a5d7f8be0 service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance_info_cache with network_info: [{"id": "c31a25f5-7d02-427f-932a-464daf59e755", "address": "fa:16:3e:a0:49:93", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc31a25f5-7d", "ovs_interfaceid": "c31a25f5-7d02-427f-932a-464daf59e755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1709.571019] env[63297]: DEBUG nova.compute.utils [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1709.580444] env[63297]: DEBUG oslo_concurrency.lockutils [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.580625] env[63297]: DEBUG oslo_concurrency.lockutils [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1709.580796] env[63297]: DEBUG nova.network.neutron [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1709.590064] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8262db69-8356-42dd-9558-8303f73ab71c tempest-ServersTestJSON-1972465365 tempest-ServersTestJSON-1972465365-project-member] Lock "765f3232-f3f9-4d9b-92f2-fb6603f2a90a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.348s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.591364] env[63297]: DEBUG oslo_vmware.api [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698224, 'name': PowerOffVM_Task, 'duration_secs': 0.256627} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.592118] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1709.592296] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1709.592645] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61d021fa-f84a-412f-983e-4cea4a5e5d6c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.605701] env[63297]: DEBUG oslo_vmware.api [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698225, 'name': PowerOffVM_Task, 'duration_secs': 0.259408} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.605931] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1709.606111] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1709.606348] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fcdf7a1c-1794-4ac5-8520-e1fde35d419b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.695565] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1709.695767] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1709.697150] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleting the datastore file [datastore1] d7dc0672-c908-418e-bfcb-8daa761fba37 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1709.697150] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a2a17a5-7343-414a-8e62-c46092502016 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.699594] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1709.699787] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1709.699958] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Deleting the datastore file [datastore1] 
b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1709.700569] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c8850cd-3584-404f-ba1d-db7ff2306b44 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.704436] env[63297]: DEBUG oslo_vmware.api [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1709.704436] env[63297]: value = "task-1698228" [ 1709.704436] env[63297]: _type = "Task" [ 1709.704436] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.707759] env[63297]: DEBUG nova.compute.manager [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1709.711975] env[63297]: DEBUG oslo_vmware.api [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1709.711975] env[63297]: value = "task-1698229" [ 1709.711975] env[63297]: _type = "Task" [ 1709.711975] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.724982] env[63297]: DEBUG oslo_vmware.api [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.724982] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c5c23b45-68f5-480b-991c-0ff77c315a09 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 28.903s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.735665] env[63297]: DEBUG oslo_vmware.api [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698229, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.745786] env[63297]: DEBUG oslo_concurrency.lockutils [req-4e12120d-955f-41ac-bdc6-96342b20b3ff req-0c7f0a51-1fa0-4e50-bf78-3d3a5d7f8be0 service nova] Releasing lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1710.074566] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.217321] env[63297]: DEBUG oslo_vmware.api [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.229516] env[63297]: DEBUG oslo_vmware.api [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698229, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.238495] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.308530] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b506b853-9344-4434-8354-04481bbd8f58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.316478] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6c18d3-3636-415e-a065-1fd3e204c7f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.358064] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7681d6-691e-4bcf-b53a-a0da9b9d27ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.368014] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a12a0c-24a4-4440-bb7c-d41651052452 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.382569] env[63297]: DEBUG nova.compute.provider_tree [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1710.416993] env[63297]: DEBUG nova.network.neutron [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance_info_cache with network_info: [{"id": "dc0af285-6a18-4cb7-b669-1b9a78865789", "address": "fa:16:3e:f3:6b:12", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc0af285-6a", "ovs_interfaceid": "dc0af285-6a18-4cb7-b669-1b9a78865789", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1710.718950] env[63297]: DEBUG oslo_vmware.api [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.726897] env[63297]: DEBUG oslo_vmware.api [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698229, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.827109] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.827394] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.827632] env[63297]: INFO nova.compute.manager [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Attaching volume 3a1d595a-6c94-4766-b9a2-b46ebcc6b92c to /dev/sdb [ 1710.868098] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d1d73e-ca2a-435c-832c-a3024638d462 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.875725] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9254473-b9f7-4407-b64a-b447bdd21ee9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.886012] env[63297]: DEBUG nova.scheduler.client.report [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1710.895096] env[63297]: DEBUG nova.virt.block_device [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Updating existing volume attachment record: 8e559150-6e58-418c-8cd0-4818a5399ecb {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1710.919417] env[63297]: DEBUG oslo_concurrency.lockutils [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1711.216498] env[63297]: DEBUG oslo_vmware.api [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 
tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.360365} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.216807] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1711.217233] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1711.217233] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1711.217473] env[63297]: INFO nova.compute.manager [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Took 2.16 seconds to destroy the instance on the hypervisor. [ 1711.217555] env[63297]: DEBUG oslo.service.loopingcall [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1711.217704] env[63297]: DEBUG nova.compute.manager [-] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1711.217798] env[63297]: DEBUG nova.network.neutron [-] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1711.230018] env[63297]: DEBUG oslo_vmware.api [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.41172} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.230018] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1711.230018] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1711.230018] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1711.230018] env[63297]: INFO nova.compute.manager [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Took 2.94 seconds to destroy the instance on the hypervisor. [ 1711.230018] env[63297]: DEBUG oslo.service.loopingcall [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1711.230018] env[63297]: DEBUG nova.compute.manager [-] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1711.230018] env[63297]: DEBUG nova.network.neutron [-] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1711.399222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.399222] env[63297]: DEBUG nova.compute.manager [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1711.402643] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 6.677s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.457870] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff37b8a8-6e2a-4479-a14c-c5826a69c6aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.488885] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5a2f3d-abf4-4426-86fe-7b645e692601 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.496965] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance 'ffaa1402-5b51-4393-82c7-d9db964edfd3' progress to 83 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1711.584723] env[63297]: DEBUG nova.compute.manager [req-e0c319d5-f73e-4367-ae79-a5ecf480f1f2 req-16ec9ad6-a84e-49c4-ae0e-de04e102e31c service nova] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Received event network-vif-deleted-51feb81a-d695-4671-800d-b58470af4ae2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1711.584961] env[63297]: INFO nova.compute.manager [req-e0c319d5-f73e-4367-ae79-a5ecf480f1f2 req-16ec9ad6-a84e-49c4-ae0e-de04e102e31c service nova] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Neutron deleted interface 51feb81a-d695-4671-800d-b58470af4ae2; detaching it from the instance and deleting it from the info cache [ 1711.585380] env[63297]: DEBUG nova.network.neutron [req-e0c319d5-f73e-4367-ae79-a5ecf480f1f2 req-16ec9ad6-a84e-49c4-ae0e-de04e102e31c service nova] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1711.666017] env[63297]: DEBUG nova.compute.manager [req-e4e78860-1a0c-4a1a-a0a3-597ff36849df req-f895e1d5-01e8-47cd-9345-a6e351972440 service nova] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Received event network-vif-deleted-b7115a0d-4014-408b-b05e-52f08768ec9e {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1711.666554] env[63297]: INFO nova.compute.manager [req-e4e78860-1a0c-4a1a-a0a3-597ff36849df req-f895e1d5-01e8-47cd-9345-a6e351972440 service nova] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Neutron deleted interface b7115a0d-4014-408b-b05e-52f08768ec9e; detaching it from the instance and deleting it from the info cache [ 1711.666975] env[63297]: DEBUG nova.network.neutron [req-e4e78860-1a0c-4a1a-a0a3-597ff36849df req-f895e1d5-01e8-47cd-9345-a6e351972440 service nova] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1711.906701] env[63297]: DEBUG nova.compute.utils [None 
req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1711.912734] env[63297]: DEBUG nova.compute.manager [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1711.912734] env[63297]: DEBUG nova.network.neutron [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1711.972944] env[63297]: DEBUG nova.policy [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ce4e0757c584ebdb556c79d3c0bd990', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2786fb3158214107a458dc08735ebeb1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1712.003394] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1712.007867] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4283d02b-c12e-4b12-92d8-e168c82eae46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.015466] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1712.015466] env[63297]: value = "task-1698233" [ 1712.015466] env[63297]: _type = "Task" [ 1712.015466] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.025329] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698233, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.057923] env[63297]: DEBUG nova.network.neutron [-] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1712.088218] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-949efa26-8bc8-4e73-96f7-521e0f3abe12 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.099345] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0285d1f-ff4e-4e18-8f81-fcbb1b08ef29 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.132238] env[63297]: DEBUG nova.compute.manager [req-e0c319d5-f73e-4367-ae79-a5ecf480f1f2 req-16ec9ad6-a84e-49c4-ae0e-de04e102e31c service nova] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Detach interface failed, port_id=51feb81a-d695-4671-800d-b58470af4ae2, reason: Instance d7dc0672-c908-418e-bfcb-8daa761fba37 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1712.138944] env[63297]: DEBUG nova.network.neutron [-] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1712.170125] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33290f8d-8448-454a-a6b3-ba49d62d4829 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.180043] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1bc9b4-8753-4db7-8fc9-59cf2c320cc1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.211307] env[63297]: DEBUG nova.compute.manager [req-e4e78860-1a0c-4a1a-a0a3-597ff36849df req-f895e1d5-01e8-47cd-9345-a6e351972440 service nova] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Detach interface failed, port_id=b7115a0d-4014-408b-b05e-52f08768ec9e, reason: Instance b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1712.249605] env[63297]: DEBUG nova.network.neutron [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Successfully created port: 191de685-dee5-4eac-944a-940a39615f0c {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1712.426141] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Applying migration context for instance ffaa1402-5b51-4393-82c7-d9db964edfd3 as it has an incoming, in-progress migration e3bc88a1-09ba-4143-9085-38694740e952. 
Migration status is post-migrating {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1712.431169] env[63297]: INFO nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating resource usage from migration e3bc88a1-09ba-4143-9085-38694740e952 [ 1712.440472] env[63297]: DEBUG nova.compute.manager [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1712.468630] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.468783] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 10def566-2d1f-4ea2-9df5-ebf4d77f7b48 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.468939] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 42d872d6-da12-474b-8741-1d991d507cfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.469166] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.469205] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance f87867c3-58d4-4bd6-b6ef-1608ebef6b22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.469287] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 3ab25962-2150-4331-a018-aa61bd082814 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.469800] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance d7dc0672-c908-418e-bfcb-8daa761fba37 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.469800] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.469800] env[63297]: WARNING nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1712.469800] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.470147] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Migration e3bc88a1-09ba-4143-9085-38694740e952 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1712.470147] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance ffaa1402-5b51-4393-82c7-d9db964edfd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.470147] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 5c0eefd2-69d4-4100-93b9-d6265c28c7be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.526265] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698233, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.561438] env[63297]: INFO nova.compute.manager [-] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Took 1.34 seconds to deallocate network for instance. [ 1712.642607] env[63297]: INFO nova.compute.manager [-] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Took 1.41 seconds to deallocate network for instance. [ 1712.978142] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance c1696ee9-cb48-414c-b0a0-b6fa2e880a81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1713.026451] env[63297]: DEBUG oslo_vmware.api [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698233, 'name': PowerOnVM_Task, 'duration_secs': 0.596104} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.026727] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1713.026910] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-476973b8-147d-4b0e-a6ce-ff503bdd970a tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance 'ffaa1402-5b51-4393-82c7-d9db964edfd3' progress to 100 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1713.068156] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.150046] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.450976] env[63297]: DEBUG nova.compute.manager [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1713.477356] env[63297]: DEBUG nova.virt.hardware [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1713.477655] env[63297]: DEBUG nova.virt.hardware [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1713.477851] env[63297]: DEBUG nova.virt.hardware [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1713.478218] env[63297]: DEBUG nova.virt.hardware [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1713.478291] env[63297]: DEBUG nova.virt.hardware [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1713.478526] env[63297]: DEBUG nova.virt.hardware [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1713.478814] env[63297]: DEBUG nova.virt.hardware [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1713.479036] env[63297]: DEBUG nova.virt.hardware [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1713.479252] env[63297]: DEBUG 
nova.virt.hardware [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1713.479468] env[63297]: DEBUG nova.virt.hardware [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1713.479684] env[63297]: DEBUG nova.virt.hardware [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1713.480940] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 37a4719c-20b4-4cb3-b8fc-bfa28b906799 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1713.480940] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1713.480940] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2944MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1713.483705] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7065969f-521d-46e6-837a-9f2313e898dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.492363] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39db968d-7be0-48c7-86e1-ebf326ce3b58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.681686] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f60370-a328-435c-a04b-4b9709259675 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.691294] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec76c0a9-de20-4ab2-9131-db7d66d84ac1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.695220] env[63297]: DEBUG nova.compute.manager [req-0b47f00f-58b0-4384-84c1-e1a04bb3d8bf req-8e69114b-ff84-4964-aa56-3961a465b201 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Received event network-vif-plugged-191de685-dee5-4eac-944a-940a39615f0c {{(pid=63297) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1713.695479] env[63297]: DEBUG oslo_concurrency.lockutils [req-0b47f00f-58b0-4384-84c1-e1a04bb3d8bf req-8e69114b-ff84-4964-aa56-3961a465b201 service nova] Acquiring lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.695697] env[63297]: DEBUG oslo_concurrency.lockutils [req-0b47f00f-58b0-4384-84c1-e1a04bb3d8bf req-8e69114b-ff84-4964-aa56-3961a465b201 service nova] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.695859] env[63297]: DEBUG oslo_concurrency.lockutils [req-0b47f00f-58b0-4384-84c1-e1a04bb3d8bf req-8e69114b-ff84-4964-aa56-3961a465b201 service nova] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.696038] env[63297]: DEBUG nova.compute.manager [req-0b47f00f-58b0-4384-84c1-e1a04bb3d8bf req-8e69114b-ff84-4964-aa56-3961a465b201 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] No waiting events found dispatching network-vif-plugged-191de685-dee5-4eac-944a-940a39615f0c {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1713.696208] env[63297]: WARNING nova.compute.manager [req-0b47f00f-58b0-4384-84c1-e1a04bb3d8bf req-8e69114b-ff84-4964-aa56-3961a465b201 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Received unexpected event network-vif-plugged-191de685-dee5-4eac-944a-940a39615f0c for instance with vm_state building and task_state spawning. 
[ 1713.723448] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7115e506-d400-427e-9244-408f71d3c5b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.730781] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c628c000-8a5e-4d56-9ca2-d8fb51024121 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.744423] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1713.773794] env[63297]: DEBUG nova.network.neutron [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Successfully updated port: 191de685-dee5-4eac-944a-940a39615f0c {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1714.263804] env[63297]: ERROR nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [req-53da56d2-2b63-4a4a-8e38-7bb3ab1e4b44] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-53da56d2-2b63-4a4a-8e38-7bb3ab1e4b44"}]} [ 1714.279238] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1714.281073] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "refresh_cache-5c0eefd2-69d4-4100-93b9-d6265c28c7be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.281201] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired lock "refresh_cache-5c0eefd2-69d4-4100-93b9-d6265c28c7be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.281390] env[63297]: DEBUG nova.network.neutron [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1714.294163] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1714.294444] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1714.305626] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1714.322958] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing trait associations for resource provider 
88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1714.476471] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b37b5b-40aa-432d-a672-3431d7a6a1e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.483850] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96bc28b3-db39-4cfd-b19f-fa270d0fb432 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.513563] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5a9e86-599c-4c71-986c-7e25d5214ff2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.522078] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22dcd6e7-98a0-45d2-b782-7d626e9c9ce6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.535434] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1714.814103] env[63297]: DEBUG nova.network.neutron [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1714.966043] env[63297]: DEBUG nova.network.neutron [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Updating instance_info_cache with network_info: [{"id": "191de685-dee5-4eac-944a-940a39615f0c", "address": "fa:16:3e:20:95:37", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap191de685-de", "ovs_interfaceid": "191de685-dee5-4eac-944a-940a39615f0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.068030] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 155 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1715.068290] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 155 to 156 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1715.068444] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1715.469148] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 
tempest-AttachVolumeNegativeTest-184052581-project-member] Releasing lock "refresh_cache-5c0eefd2-69d4-4100-93b9-d6265c28c7be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.469473] env[63297]: DEBUG nova.compute.manager [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Instance network_info: |[{"id": "191de685-dee5-4eac-944a-940a39615f0c", "address": "fa:16:3e:20:95:37", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap191de685-de", "ovs_interfaceid": "191de685-dee5-4eac-944a-940a39615f0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1715.470216] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:95:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '191de685-dee5-4eac-944a-940a39615f0c', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1715.477707] env[63297]: DEBUG oslo.service.loopingcall [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1715.477999] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1715.478147] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b15ca1d9-fef7-42dd-aaf2-e9b9fb08cbb3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.498709] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1715.498709] env[63297]: value = "task-1698235" [ 1715.498709] env[63297]: _type = "Task" [ 1715.498709] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.506845] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698235, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.579465] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1715.580088] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.178s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.580088] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.649s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.580202] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.582454] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.445s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.584211] env[63297]: INFO nova.compute.claims [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1715.605746] env[63297]: INFO nova.scheduler.client.report [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Deleted allocations for instance 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886 [ 1715.701418] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "ffaa1402-5b51-4393-82c7-d9db964edfd3" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.701678] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "ffaa1402-5b51-4393-82c7-d9db964edfd3" acquired by 
"nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.701863] env[63297]: DEBUG nova.compute.manager [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Going to confirm migration 6 {{(pid=63297) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1715.735386] env[63297]: DEBUG nova.compute.manager [req-569727f7-e3e9-4238-8f1d-7ce3d921f851 req-d67529d8-b9c5-46f9-8065-43ea916c0500 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Received event network-changed-191de685-dee5-4eac-944a-940a39615f0c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1715.735682] env[63297]: DEBUG nova.compute.manager [req-569727f7-e3e9-4238-8f1d-7ce3d921f851 req-d67529d8-b9c5-46f9-8065-43ea916c0500 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Refreshing instance network info cache due to event network-changed-191de685-dee5-4eac-944a-940a39615f0c. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1715.735953] env[63297]: DEBUG oslo_concurrency.lockutils [req-569727f7-e3e9-4238-8f1d-7ce3d921f851 req-d67529d8-b9c5-46f9-8065-43ea916c0500 service nova] Acquiring lock "refresh_cache-5c0eefd2-69d4-4100-93b9-d6265c28c7be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.736118] env[63297]: DEBUG oslo_concurrency.lockutils [req-569727f7-e3e9-4238-8f1d-7ce3d921f851 req-d67529d8-b9c5-46f9-8065-43ea916c0500 service nova] Acquired lock "refresh_cache-5c0eefd2-69d4-4100-93b9-d6265c28c7be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.736293] env[63297]: DEBUG nova.network.neutron [req-569727f7-e3e9-4238-8f1d-7ce3d921f851 req-d67529d8-b9c5-46f9-8065-43ea916c0500 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Refreshing network info cache for port 191de685-dee5-4eac-944a-940a39615f0c {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1715.941546] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Volume attach. 
Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1715.941806] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354013', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'name': 'volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f87867c3-58d4-4bd6-b6ef-1608ebef6b22', 'attached_at': '', 'detached_at': '', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'serial': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1715.942758] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f88e8f1-6989-4c27-a332-95b7802d2cff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.959354] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d68a72-7e6c-42ae-a0f1-0fc29808936b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.985032] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c/volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1715.985283] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1663bfa7-c993-456b-8d72-db366cef853a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.005373] env[63297]: DEBUG oslo_vmware.api [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1716.005373] env[63297]: value = "task-1698236" [ 1716.005373] env[63297]: _type = "Task" [ 1716.005373] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.013157] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698235, 'name': CreateVM_Task, 'duration_secs': 0.362413} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.013559] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1716.014198] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.014355] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.014722] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1716.014953] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5d81f50-cf4b-42c6-9043-7dd857322dcf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.019401] env[63297]: DEBUG oslo_vmware.api [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698236, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.021959] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1716.021959] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5223079e-1cda-051a-e61b-600003af7d4a" [ 1716.021959] env[63297]: _type = "Task" [ 1716.021959] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.029488] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5223079e-1cda-051a-e61b-600003af7d4a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.113975] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b9fb9804-a394-4a21-b401-9f43963c1f9c tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.048s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.241708] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.241884] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquired lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.242077] env[63297]: DEBUG nova.network.neutron [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1716.242262] env[63297]: DEBUG nova.objects.instance [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lazy-loading 'info_cache' on Instance uuid ffaa1402-5b51-4393-82c7-d9db964edfd3 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1716.446460] env[63297]: DEBUG nova.network.neutron [req-569727f7-e3e9-4238-8f1d-7ce3d921f851 req-d67529d8-b9c5-46f9-8065-43ea916c0500 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Updated VIF entry in instance network info cache for port 191de685-dee5-4eac-944a-940a39615f0c. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1716.446867] env[63297]: DEBUG nova.network.neutron [req-569727f7-e3e9-4238-8f1d-7ce3d921f851 req-d67529d8-b9c5-46f9-8065-43ea916c0500 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Updating instance_info_cache with network_info: [{"id": "191de685-dee5-4eac-944a-940a39615f0c", "address": "fa:16:3e:20:95:37", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap191de685-de", "ovs_interfaceid": "191de685-dee5-4eac-944a-940a39615f0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.517293] env[63297]: DEBUG oslo_vmware.api [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698236, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.531492] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5223079e-1cda-051a-e61b-600003af7d4a, 'name': SearchDatastore_Task, 'duration_secs': 0.014163} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.531820] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1716.532066] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1716.532296] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.532438] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.532620] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1716.532883] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d403b2c4-2d11-4575-9003-578dbaacda35 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.541259] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1716.541443] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1716.542160] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4c1817d-6547-4aad-8f78-815ed1b34f44 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.546952] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1716.546952] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52689640-beff-8d7a-c3e9-22926bd582e4" [ 1716.546952] env[63297]: _type = "Task" [ 1716.546952] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.554435] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52689640-beff-8d7a-c3e9-22926bd582e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.759256] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c02d0d0-1235-4c8b-aeec-b889dea98a1e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.767474] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f286c2ee-4bb8-48e5-9e34-e8ad3983da02 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.799632] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb78cb7-c1b2-4d20-bbb2-d5fdb768bca9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.807023] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febb1d01-d396-4e39-9132-abb868eb3ff6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.820625] env[63297]: DEBUG nova.compute.provider_tree [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1716.949918] env[63297]: DEBUG oslo_concurrency.lockutils [req-569727f7-e3e9-4238-8f1d-7ce3d921f851 req-d67529d8-b9c5-46f9-8065-43ea916c0500 service nova] Releasing lock "refresh_cache-5c0eefd2-69d4-4100-93b9-d6265c28c7be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.017830] env[63297]: DEBUG oslo_vmware.api [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698236, 'name': ReconfigVM_Task, 'duration_secs': 0.69264} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.018072] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c/volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1717.023124] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce80bf19-89c0-4071-94bf-8f06806d6355 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.038543] env[63297]: DEBUG oslo_vmware.api [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1717.038543] env[63297]: value = "task-1698237" [ 1717.038543] env[63297]: _type = "Task" [ 1717.038543] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.047027] env[63297]: DEBUG oslo_vmware.api [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698237, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.054899] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52689640-beff-8d7a-c3e9-22926bd582e4, 'name': SearchDatastore_Task, 'duration_secs': 0.00928} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.055644] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63181e71-09c4-4057-b28a-7834a8deb392 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.060523] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1717.060523] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d9a8bb-a1f2-84eb-0124-04e310eab4e6" [ 1717.060523] env[63297]: _type = "Task" [ 1717.060523] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.068937] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d9a8bb-a1f2-84eb-0124-04e310eab4e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.324387] env[63297]: DEBUG nova.scheduler.client.report [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1717.466532] env[63297]: DEBUG nova.network.neutron [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance_info_cache with network_info: [{"id": "dc0af285-6a18-4cb7-b669-1b9a78865789", "address": "fa:16:3e:f3:6b:12", "network": {"id": "66b15acd-3115-40bf-875e-5291cf68cada", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1077982422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc8039a70b34a269d3aed1ecb558b7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc0af285-6a", "ovs_interfaceid": "dc0af285-6a18-4cb7-b669-1b9a78865789", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.548774] env[63297]: DEBUG oslo_vmware.api [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698237, 'name': ReconfigVM_Task, 'duration_secs': 0.262859} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.549081] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354013', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'name': 'volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f87867c3-58d4-4bd6-b6ef-1608ebef6b22', 'attached_at': '', 'detached_at': '', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'serial': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1717.571530] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d9a8bb-a1f2-84eb-0124-04e310eab4e6, 'name': SearchDatastore_Task, 'duration_secs': 0.011244} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.571821] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.572089] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5c0eefd2-69d4-4100-93b9-d6265c28c7be/5c0eefd2-69d4-4100-93b9-d6265c28c7be.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1717.572403] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b49ca619-fb47-4a25-b9e6-5a36dcf336b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.578596] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1717.578596] env[63297]: value = "task-1698238" [ 1717.578596] env[63297]: _type = "Task" [ 1717.578596] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.586551] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698238, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.830111] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.248s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.830771] env[63297]: DEBUG nova.compute.manager [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1717.833642] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.595s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1717.835106] env[63297]: INFO nova.compute.claims [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1717.970033] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Releasing lock "refresh_cache-ffaa1402-5b51-4393-82c7-d9db964edfd3" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.970230] env[63297]: DEBUG nova.objects.instance [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lazy-loading 'migration_context' on Instance uuid ffaa1402-5b51-4393-82c7-d9db964edfd3 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1718.090406] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698238, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.326801] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "3ab25962-2150-4331-a018-aa61bd082814" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.327107] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "3ab25962-2150-4331-a018-aa61bd082814" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.327328] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "3ab25962-2150-4331-a018-aa61bd082814-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.327510] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "3ab25962-2150-4331-a018-aa61bd082814-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.327682] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "3ab25962-2150-4331-a018-aa61bd082814-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.329918] env[63297]: INFO nova.compute.manager [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Terminating instance [ 1718.331558] env[63297]: DEBUG nova.compute.manager [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1718.331748] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1718.332591] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f8f3dc-3644-4a45-8fb9-8b731cd8e36f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.342712] env[63297]: DEBUG nova.compute.utils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1718.343832] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1718.344306] env[63297]: DEBUG nova.compute.manager [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1718.344465] env[63297]: DEBUG nova.network.neutron [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1718.346495] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99ed7d1d-f0a0-49cc-a089-f70603d39615 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.353313] env[63297]: DEBUG oslo_vmware.api [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1718.353313] env[63297]: value = "task-1698239" [ 1718.353313] env[63297]: _type = "Task" [ 1718.353313] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.362669] env[63297]: DEBUG oslo_vmware.api [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698239, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.386334] env[63297]: DEBUG nova.policy [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c21dc2049dd84f87a3b7cbcd7ba0ebcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48bfb708de5c4dd287530be2f8483ca9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1718.474107] env[63297]: DEBUG nova.objects.base [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1718.475063] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99d1da2-0791-4210-801f-a4d45e386704 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.497484] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b26e1c18-ee8c-44de-a5d5-4dd53a152dcf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.503887] env[63297]: DEBUG oslo_vmware.api [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1718.503887] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cb8eb6-0e3a-dd06-8c37-564f4f5a560a" [ 1718.503887] env[63297]: _type = "Task" [ 1718.503887] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.512821] env[63297]: DEBUG oslo_vmware.api [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cb8eb6-0e3a-dd06-8c37-564f4f5a560a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.589063] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698238, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612265} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.589469] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 5c0eefd2-69d4-4100-93b9-d6265c28c7be/5c0eefd2-69d4-4100-93b9-d6265c28c7be.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1718.589688] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1718.589947] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b88fb75b-84a7-4fdd-a420-5bad8f7f3efb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.594949] env[63297]: DEBUG nova.objects.instance [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lazy-loading 'flavor' on Instance uuid f87867c3-58d4-4bd6-b6ef-1608ebef6b22 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1718.597590] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1718.597590] env[63297]: value = "task-1698240" [ 1718.597590] env[63297]: _type = "Task" [ 1718.597590] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.605270] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698240, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.850376] env[63297]: DEBUG nova.compute.manager [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1718.868671] env[63297]: DEBUG oslo_vmware.api [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698239, 'name': PowerOffVM_Task, 'duration_secs': 0.466093} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.869843] env[63297]: DEBUG nova.network.neutron [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Successfully created port: da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1718.872945] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1718.872945] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1718.873263] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e86e1f0-9952-4d40-81cb-77156e8fda27 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.979013] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1718.979266] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1718.979443] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Deleting the datastore file [datastore1] 3ab25962-2150-4331-a018-aa61bd082814 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1718.982271] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3497c912-14ed-474d-b3f3-fcf9d3c66247 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.988968] env[63297]: DEBUG oslo_vmware.api [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for the task: (returnval){ [ 1718.988968] env[63297]: value = "task-1698242" [ 1718.988968] env[63297]: _type = "Task" [ 1718.988968] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.997085] env[63297]: DEBUG oslo_vmware.api [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698242, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.012917] env[63297]: DEBUG oslo_vmware.api [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cb8eb6-0e3a-dd06-8c37-564f4f5a560a, 'name': SearchDatastore_Task, 'duration_secs': 0.020456} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.015433] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.073029] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293ad377-c23b-4fde-a3d2-8a232153ab99 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.081600] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16eb6811-468e-418c-8624-ec69e44484b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.116755] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d8be0f-139b-43c4-a140-2c0327945efe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.119295] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d3e5e990-0dbf-4e80-94c6-47fd31146437 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.292s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.125746] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698240, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063997} completed successfully. 
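The lockutils entries record how long each caller waited for, and then held, named locks such as "compute_resources" and the per-instance lock released above after 8.292s. A minimal sketch of that pattern, assuming oslo.concurrency's commonly used lock()/synchronized() helpers:

    # Sketch only: serializing resource-tracker style updates on a named lock,
    # assuming oslo.concurrency's lockutils.lock() / synchronized() helpers.
    from oslo_concurrency import lockutils

    def drop_move_claim_at_source(instance):
        # waits for, then holds, the "compute_resources" lock, as in the log
        with lockutils.lock("compute_resources"):
            ...  # adjust tracked usage for the instance here

    @lockutils.synchronized("compute_resources")
    def update_usage(instance):
        ...  # decorator form of the same serialization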
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.127489] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1719.128555] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0b2c63-dd2a-4fc1-85c0-73f0fc8484e6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.131808] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f53a55f-ee60-4c06-b8bb-f33fd85ab071 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.157946] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 5c0eefd2-69d4-4100-93b9-d6265c28c7be/5c0eefd2-69d4-4100-93b9-d6265c28c7be.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1719.165819] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11cf5312-6ac2-4bf5-9fe3-7e72cb72eba0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.180144] env[63297]: DEBUG nova.compute.provider_tree [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1719.189441] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1719.189441] env[63297]: value = "task-1698243" [ 1719.189441] env[63297]: _type = "Task" [ 1719.189441] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.199283] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698243, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.499937] env[63297]: DEBUG oslo_vmware.api [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Task: {'id': task-1698242, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.435584} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.500283] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1719.500525] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1719.500762] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1719.500982] env[63297]: INFO nova.compute.manager [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1719.501303] env[63297]: DEBUG oslo.service.loopingcall [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1719.501561] env[63297]: DEBUG nova.compute.manager [-] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1719.501666] env[63297]: DEBUG nova.network.neutron [-] [instance: 3ab25962-2150-4331-a018-aa61bd082814] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1719.564219] env[63297]: INFO nova.compute.manager [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Rebuilding instance [ 1719.604862] env[63297]: DEBUG nova.compute.manager [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1719.605726] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1d35f7-1699-4b05-bd0f-71ebee03ab6e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.684036] env[63297]: DEBUG nova.scheduler.client.report [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1719.709398] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698243, 'name': ReconfigVM_Task, 'duration_secs': 0.326327} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.709823] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 5c0eefd2-69d4-4100-93b9-d6265c28c7be/5c0eefd2-69d4-4100-93b9-d6265c28c7be.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1719.710740] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a53567d-3ad7-4a0c-9782-90b45644c50a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.718897] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1719.718897] env[63297]: value = "task-1698244" [ 1719.718897] env[63297]: _type = "Task" [ 1719.718897] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.731433] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698244, 'name': Rename_Task} progress is 5%. 
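The inventory reported for provider 88960333-a089-4255-ad72-5c02d57b2b35 lists, per resource class, a total, a reserved amount, an allocation_ratio and a max_unit. Placement's effective schedulable capacity is normally (total - reserved) * allocation_ratio, with each individual request further capped at max_unit; a quick worked check against the numbers above:

    # Worked example using the inventory values from the log above.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 181},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity, "per-request cap:", inv["max_unit"])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0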
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.838772] env[63297]: DEBUG nova.compute.manager [req-43b36903-83eb-469a-8570-e6f10c512e83 req-b6a3cfc8-166d-4993-93a3-8811aa3fa45e service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Received event network-vif-deleted-d469619d-b568-437d-8023-8d02e02b7350 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1719.838889] env[63297]: INFO nova.compute.manager [req-43b36903-83eb-469a-8570-e6f10c512e83 req-b6a3cfc8-166d-4993-93a3-8811aa3fa45e service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Neutron deleted interface d469619d-b568-437d-8023-8d02e02b7350; detaching it from the instance and deleting it from the info cache [ 1719.839074] env[63297]: DEBUG nova.network.neutron [req-43b36903-83eb-469a-8570-e6f10c512e83 req-b6a3cfc8-166d-4993-93a3-8811aa3fa45e service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.861349] env[63297]: DEBUG nova.compute.manager [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1719.888452] env[63297]: DEBUG nova.virt.hardware [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1719.888706] env[63297]: DEBUG nova.virt.hardware [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1719.888865] env[63297]: DEBUG nova.virt.hardware [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1719.889069] env[63297]: DEBUG nova.virt.hardware [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1719.889225] env[63297]: 
DEBUG nova.virt.hardware [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1719.889373] env[63297]: DEBUG nova.virt.hardware [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1719.889580] env[63297]: DEBUG nova.virt.hardware [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1719.889737] env[63297]: DEBUG nova.virt.hardware [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1719.889901] env[63297]: DEBUG nova.virt.hardware [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1719.890074] env[63297]: DEBUG nova.virt.hardware [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1719.890249] env[63297]: DEBUG nova.virt.hardware [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1719.891202] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f384b8-0c37-4da1-a75f-3fd7ef2a91aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.899191] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea52340-7894-4456-86a9-53a449f5aa9c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.119187] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1720.119485] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-068aa711-4163-439d-8dc0-8cc982c456bc {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.126998] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1720.126998] env[63297]: value = "task-1698245" [ 1720.126998] env[63297]: _type = "Task" [ 1720.126998] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.135073] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698245, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.191091] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.357s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.191658] env[63297]: DEBUG nova.compute.manager [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1720.194414] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.126s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.194731] env[63297]: DEBUG nova.objects.instance [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lazy-loading 'resources' on Instance uuid d7dc0672-c908-418e-bfcb-8daa761fba37 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1720.228777] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698244, 'name': Rename_Task} progress is 99%. 
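The nova.virt.hardware entries a little earlier walk from the flavor and image limits down to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" for this 1-vCPU flavor. The underlying idea is to enumerate (sockets, cores, threads) combinations whose product equals the vCPU count, bounded by the maximum topology; a rough illustrative sketch (not Nova's exact algorithm, which also weighs flavor and image preferences):

    # Rough sketch: enumerate CPU topologies whose product matches the vCPU count.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology in the log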
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.314093] env[63297]: DEBUG nova.network.neutron [-] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.342084] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dfc48217-f050-4c42-98e1-ddf62e8d5602 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.351623] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c29e07-f810-4313-9d88-3a14d4508794 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.364495] env[63297]: DEBUG nova.compute.manager [req-89498b9a-14c0-4c38-b0fe-4c925f69eb58 req-0f5b66ac-6a66-4fbe-b0d0-2049a33a9744 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Received event network-vif-plugged-da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1720.364719] env[63297]: DEBUG oslo_concurrency.lockutils [req-89498b9a-14c0-4c38-b0fe-4c925f69eb58 req-0f5b66ac-6a66-4fbe-b0d0-2049a33a9744 service nova] Acquiring lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.364922] env[63297]: DEBUG oslo_concurrency.lockutils [req-89498b9a-14c0-4c38-b0fe-4c925f69eb58 req-0f5b66ac-6a66-4fbe-b0d0-2049a33a9744 service nova] Lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.365165] env[63297]: DEBUG oslo_concurrency.lockutils [req-89498b9a-14c0-4c38-b0fe-4c925f69eb58 req-0f5b66ac-6a66-4fbe-b0d0-2049a33a9744 service nova] Lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.365338] env[63297]: DEBUG nova.compute.manager [req-89498b9a-14c0-4c38-b0fe-4c925f69eb58 req-0f5b66ac-6a66-4fbe-b0d0-2049a33a9744 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] No waiting events found dispatching network-vif-plugged-da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1720.365674] env[63297]: WARNING nova.compute.manager [req-89498b9a-14c0-4c38-b0fe-4c925f69eb58 req-0f5b66ac-6a66-4fbe-b0d0-2049a33a9744 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Received unexpected event network-vif-plugged-da1879c7-6003-4193-ab1c-019184bded8d for instance with vm_state building and task_state spawning. [ 1720.384599] env[63297]: DEBUG nova.compute.manager [req-43b36903-83eb-469a-8570-e6f10c512e83 req-b6a3cfc8-166d-4993-93a3-8811aa3fa45e service nova] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Detach interface failed, port_id=d469619d-b568-437d-8023-8d02e02b7350, reason: Instance 3ab25962-2150-4331-a018-aa61bd082814 could not be found. 
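The "Received event network-vif-plugged-..." sequence shows Neutron notifying the compute service about the new port: the manager takes the per-instance "-events" lock, pops any waiter registered for that event, and logs a warning when nothing was waiting (as happens here while the instance is still building). A small hypothetical sketch of that dispatch pattern:

    import threading
    import warnings

    # Hypothetical sketch of per-instance event dispatch, in the spirit of the
    # pop_instance_event behaviour traced in the log above.
    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()      # stands in for the "<uuid>-events" lock
            self._waiters = {}                 # (instance_uuid, event_name) -> Event

        def prepare(self, instance_uuid, event_name):
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def pop_and_signal(self, instance_uuid, event_name):
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                warnings.warn(f"unexpected event {event_name} for {instance_uuid}")
                return False
            waiter.set()                       # wake whoever was waiting for this event
            return True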
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1720.396917] env[63297]: DEBUG nova.network.neutron [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Successfully updated port: da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1720.636836] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698245, 'name': PowerOffVM_Task, 'duration_secs': 0.381359} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.637207] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1720.691972] env[63297]: INFO nova.compute.manager [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Detaching volume 3a1d595a-6c94-4766-b9a2-b46ebcc6b92c [ 1720.696984] env[63297]: DEBUG nova.compute.utils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1720.702191] env[63297]: DEBUG nova.compute.manager [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1720.702366] env[63297]: DEBUG nova.network.neutron [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1720.731870] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698244, 'name': Rename_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.732820] env[63297]: INFO nova.virt.block_device [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Attempting to driver detach volume 3a1d595a-6c94-4766-b9a2-b46ebcc6b92c from mountpoint /dev/sdb [ 1720.733054] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Volume detach. Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1720.733250] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354013', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'name': 'volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f87867c3-58d4-4bd6-b6ef-1608ebef6b22', 'attached_at': '', 'detached_at': '', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'serial': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1720.734062] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9273067-6431-4e4e-81e6-d9a281f83992 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.741810] env[63297]: DEBUG nova.policy [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f80bce1510594b8a95537f814f68b2bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45448893e7ee4b8d896d1bb3f3a9ecf1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1720.764484] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb47a9d-e275-4633-b5d7-2506f30a1d5a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.771807] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090b4322-c3dd-4b37-a154-b53ff0aa9e8d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.795730] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3517be-5cc9-4502-9c97-8c985f5ad10e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.810265] env[63297]: 
DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] The volume has not been displaced from its original location: [datastore1] volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c/volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c.vmdk. No consolidation needed. {{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1720.815823] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1720.818373] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cc4db8e-3931-476d-afb0-bc9d29711b3d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.830884] env[63297]: INFO nova.compute.manager [-] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Took 1.33 seconds to deallocate network for instance. [ 1720.838159] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1720.838159] env[63297]: value = "task-1698246" [ 1720.838159] env[63297]: _type = "Task" [ 1720.838159] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.850501] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698246, 'name': ReconfigVM_Task} progress is 6%. 
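The _detach_volume_vmdk entry above dumps the full connection_info the driver works from; the detach path mostly needs the driver type, the backing file name and the Cinder volume id before it reconfigures the VM to drop the disk (the subsequent ReconfigVM_Task). Trimmed to those fields, with the values copied from the log:

    # connection_info trimmed to the fields the detach path reads
    # (values copied from the log entry above).
    connection_info = {
        "driver_volume_type": "vmdk",
        "data": {
            "volume": "vm-354013",
            "volume_id": "3a1d595a-6c94-4766-b9a2-b46ebcc6b92c",
            "name": "volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c",
            "access_mode": "rw",
            "encrypted": False,
        },
    }

    assert connection_info["driver_volume_type"] == "vmdk"
    backing_name = connection_info["data"]["name"]    # vmdk backing to locate on the VM
    volume_id = connection_info["data"]["volume_id"]  # Cinder volume being detached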
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.903032] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1720.903032] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1720.903032] env[63297]: DEBUG nova.network.neutron [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1720.948764] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb53f1c-5f97-4ceb-8c37-40b5ba591414 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.958424] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc43ece-9a12-4183-86d6-3878bb2b6d81 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.989971] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222523a1-b279-4197-8246-8e6a483a44a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.000033] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1e21ce-0a18-4b55-b9a0-35098e72c0ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.014936] env[63297]: DEBUG nova.compute.provider_tree [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1721.030359] env[63297]: DEBUG nova.network.neutron [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Successfully created port: d6d4d4d5-0d71-4f02-a84c-3455168e28f2 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1721.205752] env[63297]: DEBUG nova.compute.manager [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1721.230410] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698244, 'name': Rename_Task, 'duration_secs': 1.148936} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.230748] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1721.231053] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0322611d-7ba2-40e7-bfdb-96e1880b0862 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.237559] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1721.237559] env[63297]: value = "task-1698247" [ 1721.237559] env[63297]: _type = "Task" [ 1721.237559] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.247375] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698247, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.340958] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.349576] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698246, 'name': ReconfigVM_Task, 'duration_secs': 0.443636} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.349847] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1721.354755] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-719790ed-15a9-4667-9189-d0e06de056cd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.369873] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1721.369873] env[63297]: value = "task-1698248" [ 1721.369873] env[63297]: _type = "Task" [ 1721.369873] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.377921] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698248, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.445308] env[63297]: DEBUG nova.network.neutron [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1721.517936] env[63297]: DEBUG nova.scheduler.client.report [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1721.649021] env[63297]: DEBUG nova.network.neutron [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updating instance_info_cache with network_info: [{"id": "da1879c7-6003-4193-ab1c-019184bded8d", "address": "fa:16:3e:c8:fa:05", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda1879c7-60", "ovs_interfaceid": "da1879c7-6003-4193-ab1c-019184bded8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1721.664925] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.665232] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.747637] env[63297]: DEBUG oslo_vmware.api [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698247, 'name': PowerOnVM_Task, 'duration_secs': 0.444036} completed successfully. 
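The instance_info_cache update above carries the whole VIF description: port id, MAC, fixed IP, subnet and OVS binding details. A small illustrative helper (field names taken from that entry; the helper itself is not Nova code) for pulling out the handful of fields a virt driver typically needs:

    # Illustrative helper: extract the driver-relevant bits from a network_info
    # entry shaped like the one cached in the log above.
    def summarize_vif(vif):
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
        ]
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "devname": vif["devname"],
            "fixed_ips": fixed_ips,
            "vif_type": vif["type"],
        }

    # For the entry above this yields port da1879c7-6003-4193-ab1c-019184bded8d,
    # MAC fa:16:3e:c8:fa:05, devname tapda1879c7-60 and fixed IP 192.168.128.9.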
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.747853] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1721.748064] env[63297]: INFO nova.compute.manager [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Took 8.30 seconds to spawn the instance on the hypervisor. [ 1721.748245] env[63297]: DEBUG nova.compute.manager [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1721.748999] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438e7f43-781f-455a-8298-196b094bd2aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.880964] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698248, 'name': ReconfigVM_Task, 'duration_secs': 0.128775} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.881271] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354013', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'name': 'volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f87867c3-58d4-4bd6-b6ef-1608ebef6b22', 'attached_at': '', 'detached_at': '', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'serial': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1722.026548] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.832s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.029045] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.880s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.029045] env[63297]: DEBUG 
nova.objects.instance [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lazy-loading 'resources' on Instance uuid b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1722.049781] env[63297]: INFO nova.scheduler.client.report [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted allocations for instance d7dc0672-c908-418e-bfcb-8daa761fba37 [ 1722.152171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.152456] env[63297]: DEBUG nova.compute.manager [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Instance network_info: |[{"id": "da1879c7-6003-4193-ab1c-019184bded8d", "address": "fa:16:3e:c8:fa:05", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda1879c7-60", "ovs_interfaceid": "da1879c7-6003-4193-ab1c-019184bded8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1722.152901] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:fa:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da1879c7-6003-4193-ab1c-019184bded8d', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1722.160528] env[63297]: DEBUG oslo.service.loopingcall [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
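The oslo.service.loopingcall entry reports that the manager is waiting for nova.virt.vmwareapi.vm_util.create_vm to return before it proceeds. In plain terms that is "run a callable and block until it finishes, retrying on transient failure"; a generic stand-in sketch (hypothetical, not the oslo.service implementation):

    import time

    # Generic stand-in for "wait for function X to return", with a simple retry
    # on failure; hypothetical, not the oslo.service loopingcall code itself.
    def call_and_wait(func, *args, retries=3, delay=1.0, **kwargs):
        last_exc = None
        for attempt in range(1, retries + 1):
            try:
                return func(*args, **kwargs)   # blocks until e.g. create_vm returns
            except Exception as exc:
                last_exc = exc
                time.sleep(delay * attempt)    # back off a little before retrying
        raise last_exc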
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1722.160693] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1722.161253] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9286dc5-4374-49d4-8061-574fb000cba6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.180066] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1722.180066] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1722.186426] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1722.186426] env[63297]: value = "task-1698249" [ 1722.186426] env[63297]: _type = "Task" [ 1722.186426] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.195731] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698249, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.214499] env[63297]: DEBUG nova.compute.manager [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1722.240998] env[63297]: DEBUG nova.virt.hardware [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1722.241263] env[63297]: DEBUG nova.virt.hardware [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1722.241421] env[63297]: DEBUG nova.virt.hardware [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1722.241602] env[63297]: DEBUG nova.virt.hardware [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1722.241758] env[63297]: DEBUG nova.virt.hardware [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1722.241907] env[63297]: DEBUG nova.virt.hardware [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1722.242125] env[63297]: DEBUG nova.virt.hardware [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1722.242286] env[63297]: DEBUG nova.virt.hardware [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1722.242451] 
env[63297]: DEBUG nova.virt.hardware [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1722.242612] env[63297]: DEBUG nova.virt.hardware [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1722.242785] env[63297]: DEBUG nova.virt.hardware [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1722.243658] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c55c4e-71d1-4543-83bb-53d632e57bcd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.253041] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039cbb19-6bde-4d76-a752-11afcefa4618 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.276183] env[63297]: INFO nova.compute.manager [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Took 17.71 seconds to build instance. [ 1722.379838] env[63297]: DEBUG nova.compute.manager [req-d2da29a5-6399-465a-8367-122a38337fb7 req-ec6430eb-0dca-4202-a173-02c33f9ca3b3 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Received event network-changed-da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1722.380096] env[63297]: DEBUG nova.compute.manager [req-d2da29a5-6399-465a-8367-122a38337fb7 req-ec6430eb-0dca-4202-a173-02c33f9ca3b3 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Refreshing instance network info cache due to event network-changed-da1879c7-6003-4193-ab1c-019184bded8d. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1722.380351] env[63297]: DEBUG oslo_concurrency.lockutils [req-d2da29a5-6399-465a-8367-122a38337fb7 req-ec6430eb-0dca-4202-a173-02c33f9ca3b3 service nova] Acquiring lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.380527] env[63297]: DEBUG oslo_concurrency.lockutils [req-d2da29a5-6399-465a-8367-122a38337fb7 req-ec6430eb-0dca-4202-a173-02c33f9ca3b3 service nova] Acquired lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.380720] env[63297]: DEBUG nova.network.neutron [req-d2da29a5-6399-465a-8367-122a38337fb7 req-ec6430eb-0dca-4202-a173-02c33f9ca3b3 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Refreshing network info cache for port da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1722.514114] env[63297]: DEBUG nova.network.neutron [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Successfully updated port: d6d4d4d5-0d71-4f02-a84c-3455168e28f2 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1722.558836] env[63297]: DEBUG oslo_concurrency.lockutils [None req-09d175c6-0f8c-457d-8c75-53acfc2ebb0f tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "d7dc0672-c908-418e-bfcb-8daa761fba37" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.519s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.696148] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698249, 'name': CreateVM_Task, 'duration_secs': 0.43038} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.699202] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1722.700107] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.700269] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.700586] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1722.701105] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70c19940-8b32-4fc3-921f-a07346ee7f26 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.706022] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1722.706022] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]525e2703-94b9-a921-7704-4598f0df856e" [ 1722.706022] env[63297]: _type = "Task" [ 1722.706022] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.712817] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8edd32-2062-4b37-9685-d1257b7c647c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.720793] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525e2703-94b9-a921-7704-4598f0df856e, 'name': SearchDatastore_Task, 'duration_secs': 0.009117} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.722646] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.722872] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1722.723112] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.723258] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.723430] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1722.723709] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f53dbe96-78a7-474f-b746-d9082ca38f1e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.726049] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18c588a-d216-493f-bdda-3656587b88ff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.757634] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec508022-2b7a-4fe6-85f8-64e0e9d99153 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.760256] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1722.760429] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1722.761135] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-584c053e-4cdd-46ca-9fdc-d3f817b1a85d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.770594] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882f882d-7a38-4add-a4c1-8cde7714bd4c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.777131] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1722.777131] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cb54f8-3c47-3ea5-e274-e86558c4cb22" [ 1722.777131] env[63297]: _type = "Task" [ 1722.777131] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.795853] env[63297]: DEBUG oslo_concurrency.lockutils [None req-36dc2918-c8d9-40b0-a5d8-8494943c1f95 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.250s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.796887] env[63297]: DEBUG nova.compute.provider_tree [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1722.805824] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52cb54f8-3c47-3ea5-e274-e86558c4cb22, 'name': SearchDatastore_Task, 'duration_secs': 0.008083} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.806956] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40b7e42a-ca78-4a81-835b-d70a8e8d9b42 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.814523] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1722.814523] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5236dc51-da7c-93cf-16a3-68677e38ea18" [ 1722.814523] env[63297]: _type = "Task" [ 1722.814523] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.827393] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5236dc51-da7c-93cf-16a3-68677e38ea18, 'name': SearchDatastore_Task, 'duration_secs': 0.009162} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.827747] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.828137] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c1696ee9-cb48-414c-b0a0-b6fa2e880a81/c1696ee9-cb48-414c-b0a0-b6fa2e880a81.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1722.829909] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-424329d1-5a0d-46b2-ac36-d6f8c8bcaa11 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.836483] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1722.836483] env[63297]: value = "task-1698250" [ 1722.836483] env[63297]: _type = "Task" [ 1722.836483] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.844502] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698250, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.930442] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1722.930610] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea1e58c0-e874-4356-958c-73ff4e0e4f8f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.938631] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1722.938631] env[63297]: value = "task-1698251" [ 1722.938631] env[63297]: _type = "Task" [ 1722.938631] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.952694] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1722.952985] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Volume detach. 
Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1722.953214] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354013', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'name': 'volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f87867c3-58d4-4bd6-b6ef-1608ebef6b22', 'attached_at': '', 'detached_at': '', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'serial': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1722.954107] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77fc5ee8-4190-48a7-8db4-e34f46a960be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.975653] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca127cb-b073-45fe-9e81-17a9f0d419e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.982909] env[63297]: WARNING nova.virt.vmwareapi.driver [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1722.983283] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1722.984278] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56dc205d-08f0-4577-a2f4-c3113de466ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.992062] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1722.992252] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-414f91e1-056a-4445-b21b-fc531bf10037 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.016452] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "refresh_cache-37a4719c-20b4-4cb3-b8fc-bfa28b906799" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.016631] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "refresh_cache-37a4719c-20b4-4cb3-b8fc-bfa28b906799" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.016778] env[63297]: DEBUG nova.network.neutron [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1723.106449] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1723.106797] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1723.107143] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleting the datastore file [datastore1] f87867c3-58d4-4bd6-b6ef-1608ebef6b22 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1723.108041] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f47b5d7-a335-490b-8024-f274e712318a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.119833] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1723.119833] env[63297]: value = "task-1698253" [ 1723.119833] env[63297]: _type = "Task" [ 1723.119833] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.132521] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698253, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.133593] env[63297]: DEBUG nova.network.neutron [req-d2da29a5-6399-465a-8367-122a38337fb7 req-ec6430eb-0dca-4202-a173-02c33f9ca3b3 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updated VIF entry in instance network info cache for port da1879c7-6003-4193-ab1c-019184bded8d. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1723.134133] env[63297]: DEBUG nova.network.neutron [req-d2da29a5-6399-465a-8367-122a38337fb7 req-ec6430eb-0dca-4202-a173-02c33f9ca3b3 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updating instance_info_cache with network_info: [{"id": "da1879c7-6003-4193-ab1c-019184bded8d", "address": "fa:16:3e:c8:fa:05", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda1879c7-60", "ovs_interfaceid": "da1879c7-6003-4193-ab1c-019184bded8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.301889] env[63297]: DEBUG nova.scheduler.client.report [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1723.349793] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698250, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48793} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.350090] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c1696ee9-cb48-414c-b0a0-b6fa2e880a81/c1696ee9-cb48-414c-b0a0-b6fa2e880a81.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1723.350304] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1723.350554] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b2ecfe3-adf0-48e1-9250-d35ac9b5810b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.361772] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1723.361772] env[63297]: value = "task-1698254" [ 1723.361772] env[63297]: _type = "Task" [ 1723.361772] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.369943] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698254, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.553314] env[63297]: DEBUG nova.network.neutron [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1723.574362] env[63297]: DEBUG nova.compute.manager [req-07bb5ea6-5020-40f6-a971-28f402185ff2 req-f32d41c5-9723-4914-87bb-75ba1b99a1b7 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Received event network-changed-191de685-dee5-4eac-944a-940a39615f0c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1723.574550] env[63297]: DEBUG nova.compute.manager [req-07bb5ea6-5020-40f6-a971-28f402185ff2 req-f32d41c5-9723-4914-87bb-75ba1b99a1b7 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Refreshing instance network info cache due to event network-changed-191de685-dee5-4eac-944a-940a39615f0c. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1723.574819] env[63297]: DEBUG oslo_concurrency.lockutils [req-07bb5ea6-5020-40f6-a971-28f402185ff2 req-f32d41c5-9723-4914-87bb-75ba1b99a1b7 service nova] Acquiring lock "refresh_cache-5c0eefd2-69d4-4100-93b9-d6265c28c7be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.574975] env[63297]: DEBUG oslo_concurrency.lockutils [req-07bb5ea6-5020-40f6-a971-28f402185ff2 req-f32d41c5-9723-4914-87bb-75ba1b99a1b7 service nova] Acquired lock "refresh_cache-5c0eefd2-69d4-4100-93b9-d6265c28c7be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.575149] env[63297]: DEBUG nova.network.neutron [req-07bb5ea6-5020-40f6-a971-28f402185ff2 req-f32d41c5-9723-4914-87bb-75ba1b99a1b7 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Refreshing network info cache for port 191de685-dee5-4eac-944a-940a39615f0c {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1723.630532] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698253, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.637283] env[63297]: DEBUG oslo_concurrency.lockutils [req-d2da29a5-6399-465a-8367-122a38337fb7 req-ec6430eb-0dca-4202-a173-02c33f9ca3b3 service nova] Releasing lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.694447] env[63297]: DEBUG nova.network.neutron [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Updating instance_info_cache with network_info: [{"id": "d6d4d4d5-0d71-4f02-a84c-3455168e28f2", "address": "fa:16:3e:1b:3b:38", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6d4d4d5-0d", "ovs_interfaceid": "d6d4d4d5-0d71-4f02-a84c-3455168e28f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.807068] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 
tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.778s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.810396] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.794s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.831561] env[63297]: INFO nova.scheduler.client.report [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Deleted allocations for instance b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a [ 1723.871567] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698254, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.132082] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.699348} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.132377] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1724.132572] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1724.132746] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1724.197861] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "refresh_cache-37a4719c-20b4-4cb3-b8fc-bfa28b906799" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.198594] env[63297]: DEBUG nova.compute.manager [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Instance network_info: |[{"id": 
"d6d4d4d5-0d71-4f02-a84c-3455168e28f2", "address": "fa:16:3e:1b:3b:38", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6d4d4d5-0d", "ovs_interfaceid": "d6d4d4d5-0d71-4f02-a84c-3455168e28f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1724.198739] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:3b:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6d4d4d5-0d71-4f02-a84c-3455168e28f2', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1724.206803] env[63297]: DEBUG oslo.service.loopingcall [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1724.207677] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1724.210262] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92d78998-b07a-4d00-821b-ee2e8587e481 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.231862] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1724.231862] env[63297]: value = "task-1698255" [ 1724.231862] env[63297]: _type = "Task" [ 1724.231862] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.240563] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698255, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.325835] env[63297]: DEBUG nova.network.neutron [req-07bb5ea6-5020-40f6-a971-28f402185ff2 req-f32d41c5-9723-4914-87bb-75ba1b99a1b7 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Updated VIF entry in instance network info cache for port 191de685-dee5-4eac-944a-940a39615f0c. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1724.326335] env[63297]: DEBUG nova.network.neutron [req-07bb5ea6-5020-40f6-a971-28f402185ff2 req-f32d41c5-9723-4914-87bb-75ba1b99a1b7 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Updating instance_info_cache with network_info: [{"id": "191de685-dee5-4eac-944a-940a39615f0c", "address": "fa:16:3e:20:95:37", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap191de685-de", "ovs_interfaceid": "191de685-dee5-4eac-944a-940a39615f0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.341246] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f6949cd2-36fb-4b37-a239-4dffb4ea3912 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.057s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.373646] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698254, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.826147} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.373954] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1724.374748] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e176bc54-b641-4a7f-85f4-26e1192efc44 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.397327] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] c1696ee9-cb48-414c-b0a0-b6fa2e880a81/c1696ee9-cb48-414c-b0a0-b6fa2e880a81.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1724.400346] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6337e90e-e838-4506-9e46-37e7ad225235 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.417870] env[63297]: DEBUG nova.compute.manager [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Received event network-vif-plugged-d6d4d4d5-0d71-4f02-a84c-3455168e28f2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1724.418113] env[63297]: DEBUG oslo_concurrency.lockutils [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] Acquiring lock "37a4719c-20b4-4cb3-b8fc-bfa28b906799-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.418321] env[63297]: DEBUG oslo_concurrency.lockutils [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] Lock "37a4719c-20b4-4cb3-b8fc-bfa28b906799-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.418488] env[63297]: DEBUG oslo_concurrency.lockutils [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] Lock "37a4719c-20b4-4cb3-b8fc-bfa28b906799-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.418654] env[63297]: DEBUG nova.compute.manager [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] No waiting events found dispatching network-vif-plugged-d6d4d4d5-0d71-4f02-a84c-3455168e28f2 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1724.418816] env[63297]: WARNING nova.compute.manager 
[req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Received unexpected event network-vif-plugged-d6d4d4d5-0d71-4f02-a84c-3455168e28f2 for instance with vm_state building and task_state spawning. [ 1724.418972] env[63297]: DEBUG nova.compute.manager [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Received event network-changed-d6d4d4d5-0d71-4f02-a84c-3455168e28f2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1724.419138] env[63297]: DEBUG nova.compute.manager [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Refreshing instance network info cache due to event network-changed-d6d4d4d5-0d71-4f02-a84c-3455168e28f2. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1724.419320] env[63297]: DEBUG oslo_concurrency.lockutils [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] Acquiring lock "refresh_cache-37a4719c-20b4-4cb3-b8fc-bfa28b906799" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.419476] env[63297]: DEBUG oslo_concurrency.lockutils [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] Acquired lock "refresh_cache-37a4719c-20b4-4cb3-b8fc-bfa28b906799" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.419651] env[63297]: DEBUG nova.network.neutron [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Refreshing network info cache for port d6d4d4d5-0d71-4f02-a84c-3455168e28f2 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1724.430352] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1724.430352] env[63297]: value = "task-1698256" [ 1724.430352] env[63297]: _type = "Task" [ 1724.430352] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.440793] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698256, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.516705] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880a9e18-516d-4317-985f-bd7f2c498489 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.526365] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fdf4752-04aa-4f38-8289-9f5b5eb57770 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.559166] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7aa3f9-d4bb-4001-9719-f91c748bdaec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.567455] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de7dca1-9a00-41c4-8bf8-a8e769a3a6ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.580900] env[63297]: DEBUG nova.compute.provider_tree [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1724.638506] env[63297]: INFO nova.virt.block_device [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Booting with volume 3a1d595a-6c94-4766-b9a2-b46ebcc6b92c at /dev/sdb [ 1724.677595] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3866eef1-9b14-452a-a8ee-74afa485af21 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.687545] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899b30d0-1f48-4df8-8da4-07e9d95339ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.699229] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. 
{{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1724.699457] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1724.700051] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1724.700232] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1724.700382] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1724.700537] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1724.700661] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1724.700790] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1724.700937] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1724.721855] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ef30a8f-ba25-4a38-8d98-a785f5cec772 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.730198] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cbe0ebc-5b73-4fac-9931-1e9b4d90428a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.750277] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698255, 'name': CreateVM_Task, 'duration_secs': 0.476475} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.750697] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1724.751376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.751543] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.751864] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1724.752122] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b902769c-888d-4316-9511-1a02b15055de {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.756397] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1724.756397] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e8342f-28d7-065c-6470-ca9605d448e1" [ 1724.756397] env[63297]: _type = "Task" [ 1724.756397] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.768155] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82773a73-e433-4f9d-84a3-c0ec543a9607 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.777913] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76786431-4e70-4932-8f71-2787a9a5d049 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.780218] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e8342f-28d7-065c-6470-ca9605d448e1, 'name': SearchDatastore_Task, 'duration_secs': 0.013884} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.780517] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.780747] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1724.780974] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.781136] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.781312] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1724.781834] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18bdbf2c-fc68-431d-88a4-408b36307902 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.788941] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1724.789146] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1724.792853] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ea6ffa5-a276-410c-ab04-25052563d0e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.795026] env[63297]: DEBUG nova.virt.block_device [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Updating existing volume attachment record: 9e12dbc8-ae2b-43c2-b322-f126e9e2986b {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1724.800043] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1724.800043] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5233125e-e69d-6480-bc2b-1e9084a65b91" [ 1724.800043] env[63297]: _type = "Task" [ 1724.800043] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.807170] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5233125e-e69d-6480-bc2b-1e9084a65b91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.828940] env[63297]: DEBUG oslo_concurrency.lockutils [req-07bb5ea6-5020-40f6-a971-28f402185ff2 req-f32d41c5-9723-4914-87bb-75ba1b99a1b7 service nova] Releasing lock "refresh_cache-5c0eefd2-69d4-4100-93b9-d6265c28c7be" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.941733] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698256, 'name': ReconfigVM_Task, 'duration_secs': 0.285508} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.941733] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Reconfigured VM instance instance-0000006d to attach disk [datastore1] c1696ee9-cb48-414c-b0a0-b6fa2e880a81/c1696ee9-cb48-414c-b0a0-b6fa2e880a81.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1724.941934] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-275c7d2f-0688-47b5-b45a-0ea853510ee5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.948297] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1724.948297] env[63297]: value = "task-1698257" [ 1724.948297] env[63297]: _type = "Task" [ 1724.948297] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.955689] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698257, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.084229] env[63297]: DEBUG nova.scheduler.client.report [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1725.140242] env[63297]: DEBUG nova.network.neutron [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Updated VIF entry in instance network info cache for port d6d4d4d5-0d71-4f02-a84c-3455168e28f2. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1725.140620] env[63297]: DEBUG nova.network.neutron [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Updating instance_info_cache with network_info: [{"id": "d6d4d4d5-0d71-4f02-a84c-3455168e28f2", "address": "fa:16:3e:1b:3b:38", "network": {"id": "50b2d189-5e6a-4653-896d-36bdb6b88b4d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-5179984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45448893e7ee4b8d896d1bb3f3a9ecf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6d4d4d5-0d", "ovs_interfaceid": "d6d4d4d5-0d71-4f02-a84c-3455168e28f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.203636] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.310575] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5233125e-e69d-6480-bc2b-1e9084a65b91, 'name': SearchDatastore_Task, 'duration_secs': 0.008577} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.311354] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24341d88-42a5-4c45-9e71-9f447c0af3be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.316320] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1725.316320] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52097531-dd3a-3317-cb70-bbf128aeabc2" [ 1725.316320] env[63297]: _type = "Task" [ 1725.316320] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.325430] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52097531-dd3a-3317-cb70-bbf128aeabc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.377235] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "42d872d6-da12-474b-8741-1d991d507cfa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.377235] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "42d872d6-da12-474b-8741-1d991d507cfa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.377235] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "42d872d6-da12-474b-8741-1d991d507cfa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.377235] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "42d872d6-da12-474b-8741-1d991d507cfa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.377485] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "42d872d6-da12-474b-8741-1d991d507cfa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.379371] env[63297]: INFO nova.compute.manager [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Terminating instance [ 1725.381310] env[63297]: DEBUG nova.compute.manager [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1725.381492] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1725.382368] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa1e8d8-c445-4c38-984d-6b06947bdace {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.390093] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1725.390318] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b565a3d3-9b0f-4b8d-9990-a9a542cdfd26 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.397453] env[63297]: DEBUG oslo_vmware.api [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1725.397453] env[63297]: value = "task-1698258" [ 1725.397453] env[63297]: _type = "Task" [ 1725.397453] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.405021] env[63297]: DEBUG oslo_vmware.api [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698258, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.457238] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698257, 'name': Rename_Task, 'duration_secs': 0.15262} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.457526] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1725.457774] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7261ea23-1fc6-4f05-bca1-1261d774adfa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.465665] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1725.465665] env[63297]: value = "task-1698259" [ 1725.465665] env[63297]: _type = "Task" [ 1725.465665] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.473822] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698259, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.643533] env[63297]: DEBUG oslo_concurrency.lockutils [req-ad1a7225-4dd8-42e2-bbab-bbda473a4978 req-f8c0f307-92a6-4dd8-bf3b-2be01342e6a6 service nova] Releasing lock "refresh_cache-37a4719c-20b4-4cb3-b8fc-bfa28b906799" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.827875] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52097531-dd3a-3317-cb70-bbf128aeabc2, 'name': SearchDatastore_Task, 'duration_secs': 0.009263} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.828288] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.828390] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 37a4719c-20b4-4cb3-b8fc-bfa28b906799/37a4719c-20b4-4cb3-b8fc-bfa28b906799.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1725.828658] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3cd0c058-2045-45f0-afd9-3d9883364e20 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.835694] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1725.835694] env[63297]: value = "task-1698260" [ 1725.835694] env[63297]: _type = "Task" [ 1725.835694] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.844269] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698260, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.911026] env[63297]: DEBUG oslo_vmware.api [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698258, 'name': PowerOffVM_Task, 'duration_secs': 0.309077} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.911427] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1725.911714] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1725.912091] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3089ccb0-208b-47b3-bbd7-3d6da14d12ca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.975927] env[63297]: DEBUG oslo_vmware.api [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698259, 'name': PowerOnVM_Task, 'duration_secs': 0.467932} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.976302] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1725.976567] env[63297]: INFO nova.compute.manager [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Took 6.11 seconds to spawn the instance on the hypervisor. 
[ 1725.976751] env[63297]: DEBUG nova.compute.manager [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1725.977547] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f57a90-c417-4756-a7fb-c71027aa3e45 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.995027] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1725.995241] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1725.995380] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Deleting the datastore file [datastore1] 42d872d6-da12-474b-8741-1d991d507cfa {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1725.995628] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bbc6a60c-b777-4f65-aa21-a5120d0fe570 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.003341] env[63297]: DEBUG oslo_vmware.api [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for the task: (returnval){ [ 1726.003341] env[63297]: value = "task-1698262" [ 1726.003341] env[63297]: _type = "Task" [ 1726.003341] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.011558] env[63297]: DEBUG oslo_vmware.api [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698262, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.095392] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.286s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.098378] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.758s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.098625] env[63297]: DEBUG nova.objects.instance [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lazy-loading 'resources' on Instance uuid 3ab25962-2150-4331-a018-aa61bd082814 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1726.346279] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698260, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.499670] env[63297]: INFO nova.compute.manager [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Took 18.38 seconds to build instance. [ 1726.512432] env[63297]: DEBUG oslo_vmware.api [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698262, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.653370] env[63297]: INFO nova.scheduler.client.report [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted allocation for migration e3bc88a1-09ba-4143-9085-38694740e952 [ 1726.751154] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babee6a6-5eee-499c-9db4-a5f2717600b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.758646] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b22c093-fc80-4f1a-b1dd-dcebf9894d50 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.788504] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ecf989-5bd3-47ef-b00a-9aec27343184 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.795819] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b820dd3-21c9-4008-9fe9-7493bb0aaa61 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.808916] env[63297]: DEBUG nova.compute.provider_tree [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1726.846422] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698260, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.906041] env[63297]: DEBUG nova.virt.hardware [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1726.906315] env[63297]: DEBUG nova.virt.hardware [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1726.906475] env[63297]: DEBUG nova.virt.hardware [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1726.906653] env[63297]: DEBUG nova.virt.hardware [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1726.906793] env[63297]: DEBUG nova.virt.hardware [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1726.906934] env[63297]: DEBUG nova.virt.hardware [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1726.907155] env[63297]: DEBUG nova.virt.hardware [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1726.907309] env[63297]: DEBUG nova.virt.hardware [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1726.908433] 
env[63297]: DEBUG nova.virt.hardware [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1726.908433] env[63297]: DEBUG nova.virt.hardware [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1726.908433] env[63297]: DEBUG nova.virt.hardware [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1726.909692] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba993a6-42ec-43e5-bb48-ae47cf814f2d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.917155] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adcb959-dc21-4c3d-9929-81e9473b9323 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.932638] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:04:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fc48e29b-113c-4849-850c-35435eab4052', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b0d7883-16da-4bdb-b728-dbcd6772ccdb', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1726.940709] env[63297]: DEBUG oslo.service.loopingcall [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1726.940709] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1726.940709] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49531ad3-eaa7-4325-8c13-6e266c3e035e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.959915] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1726.959915] env[63297]: value = "task-1698263" [ 1726.959915] env[63297]: _type = "Task" [ 1726.959915] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.967935] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698263, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.001620] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c50bedfa-8890-403b-a204-5755ec091473 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.889s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.013863] env[63297]: DEBUG oslo_vmware.api [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698262, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.159088] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "ffaa1402-5b51-4393-82c7-d9db964edfd3" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.457s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.243173] env[63297]: DEBUG nova.compute.manager [req-451c1d57-a9a9-4a1e-9815-0424a2826d08 req-4ad360bf-c7ba-4914-8004-5c2aec75a63d service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Received event network-changed-da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1727.243173] env[63297]: DEBUG nova.compute.manager [req-451c1d57-a9a9-4a1e-9815-0424a2826d08 req-4ad360bf-c7ba-4914-8004-5c2aec75a63d service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Refreshing instance network info cache due to event network-changed-da1879c7-6003-4193-ab1c-019184bded8d. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1727.243173] env[63297]: DEBUG oslo_concurrency.lockutils [req-451c1d57-a9a9-4a1e-9815-0424a2826d08 req-4ad360bf-c7ba-4914-8004-5c2aec75a63d service nova] Acquiring lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.243173] env[63297]: DEBUG oslo_concurrency.lockutils [req-451c1d57-a9a9-4a1e-9815-0424a2826d08 req-4ad360bf-c7ba-4914-8004-5c2aec75a63d service nova] Acquired lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.243173] env[63297]: DEBUG nova.network.neutron [req-451c1d57-a9a9-4a1e-9815-0424a2826d08 req-4ad360bf-c7ba-4914-8004-5c2aec75a63d service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Refreshing network info cache for port da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1727.312656] env[63297]: DEBUG nova.scheduler.client.report [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1727.347473] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698260, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.470281] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698263, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.515326] env[63297]: DEBUG oslo_vmware.api [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698262, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.817416] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.719s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.819543] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.616s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.819756] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.819920] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1727.821764] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec89ceae-a97a-4f58-9beb-6bf7f9a6b9c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.830396] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a24c97-4b55-4c90-8273-c27e3806b824 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.847184] env[63297]: INFO nova.scheduler.client.report [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Deleted allocations for instance 3ab25962-2150-4331-a018-aa61bd082814 [ 1727.851743] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74a18dc-a5ee-4358-9aed-4215306d6b10 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.864460] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698260, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.867613] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95115b91-509e-43b1-a674-9905e517472a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.897456] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179513MB free_disk=181GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1727.897664] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.897804] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.971348] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698263, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.015535] env[63297]: DEBUG oslo_vmware.api [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698262, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.023618] env[63297]: DEBUG nova.network.neutron [req-451c1d57-a9a9-4a1e-9815-0424a2826d08 req-4ad360bf-c7ba-4914-8004-5c2aec75a63d service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updated VIF entry in instance network info cache for port da1879c7-6003-4193-ab1c-019184bded8d. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1728.024028] env[63297]: DEBUG nova.network.neutron [req-451c1d57-a9a9-4a1e-9815-0424a2826d08 req-4ad360bf-c7ba-4914-8004-5c2aec75a63d service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updating instance_info_cache with network_info: [{"id": "da1879c7-6003-4193-ab1c-019184bded8d", "address": "fa:16:3e:c8:fa:05", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda1879c7-60", "ovs_interfaceid": "da1879c7-6003-4193-ab1c-019184bded8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.134351] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.134586] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.236265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "ffaa1402-5b51-4393-82c7-d9db964edfd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.236265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "ffaa1402-5b51-4393-82c7-d9db964edfd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1728.236265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "ffaa1402-5b51-4393-82c7-d9db964edfd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.236265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "ffaa1402-5b51-4393-82c7-d9db964edfd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.236265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "ffaa1402-5b51-4393-82c7-d9db964edfd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.237855] env[63297]: INFO nova.compute.manager [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Terminating instance [ 1728.239591] env[63297]: DEBUG nova.compute.manager [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1728.239787] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1728.240631] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf64e44-048d-47d3-8f00-5308cf81348a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.248585] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1728.248849] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d19ccf30-0b4e-40e7-9d98-d2440c1ecb0e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.255134] env[63297]: DEBUG oslo_vmware.api [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1728.255134] env[63297]: value = "task-1698264" [ 1728.255134] env[63297]: _type = "Task" [ 1728.255134] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.264721] env[63297]: DEBUG oslo_vmware.api [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698264, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.355912] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698260, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.240263} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.356191] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 37a4719c-20b4-4cb3-b8fc-bfa28b906799/37a4719c-20b4-4cb3-b8fc-bfa28b906799.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1728.356407] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1728.356669] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9ce53c07-f925-401e-b9b5-5120acef147c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.363847] env[63297]: DEBUG oslo_concurrency.lockutils [None req-94fccb02-5b44-4e8a-947a-45f5e7b53560 tempest-SecurityGroupsTestJSON-9806820 tempest-SecurityGroupsTestJSON-9806820-project-member] Lock "3ab25962-2150-4331-a018-aa61bd082814" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.037s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.366615] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1728.366615] env[63297]: value = "task-1698265" [ 1728.366615] env[63297]: _type = "Task" [ 1728.366615] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.374503] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698265, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.471060] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698263, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.515586] env[63297]: DEBUG oslo_vmware.api [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Task: {'id': task-1698262, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.063437} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.515832] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1728.516031] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1728.516394] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1728.516526] env[63297]: INFO nova.compute.manager [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Took 3.13 seconds to destroy the instance on the hypervisor. [ 1728.516660] env[63297]: DEBUG oslo.service.loopingcall [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1728.516850] env[63297]: DEBUG nova.compute.manager [-] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1728.516963] env[63297]: DEBUG nova.network.neutron [-] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1728.526301] env[63297]: DEBUG oslo_concurrency.lockutils [req-451c1d57-a9a9-4a1e-9815-0424a2826d08 req-4ad360bf-c7ba-4914-8004-5c2aec75a63d service nova] Releasing lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.636593] env[63297]: DEBUG nova.compute.manager [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1728.765367] env[63297]: DEBUG oslo_vmware.api [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698264, 'name': PowerOffVM_Task, 'duration_secs': 0.24797} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.765774] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1728.765948] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1728.766212] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b807e921-927f-4680-bf29-fcc22b58bcbb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.819073] env[63297]: DEBUG nova.compute.manager [req-aa989f6e-2e37-4727-afd2-bc966271a14e req-0bbaa9f3-4e20-4b1e-9d6f-ad1afa0ab95e service nova] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Received event network-vif-deleted-6875a0ac-55bd-4388-9c3d-5105d4cf1c7a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1728.819499] env[63297]: INFO nova.compute.manager [req-aa989f6e-2e37-4727-afd2-bc966271a14e req-0bbaa9f3-4e20-4b1e-9d6f-ad1afa0ab95e service nova] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Neutron deleted interface 6875a0ac-55bd-4388-9c3d-5105d4cf1c7a; detaching it from the instance and deleting it from the info cache [ 1728.819703] env[63297]: DEBUG nova.network.neutron [req-aa989f6e-2e37-4727-afd2-bc966271a14e req-0bbaa9f3-4e20-4b1e-9d6f-ad1afa0ab95e service nova] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.877408] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698265, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123838} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.877801] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1728.878275] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eec9cf2-1008-4969-9dfe-cf6ac5cf474e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.901070] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 37a4719c-20b4-4cb3-b8fc-bfa28b906799/37a4719c-20b4-4cb3-b8fc-bfa28b906799.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1728.901651] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f7a849a-a5b6-4da0-86f4-1d1c366a828e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.927447] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1728.927447] env[63297]: value = "task-1698267" [ 1728.927447] env[63297]: _type = "Task" [ 1728.927447] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.937324] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698267, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.938427] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.938521] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 10def566-2d1f-4ea2-9df5-ebf4d77f7b48 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.938608] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 42d872d6-da12-474b-8741-1d991d507cfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.938726] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance f87867c3-58d4-4bd6-b6ef-1608ebef6b22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.938842] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.938955] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.939082] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance ffaa1402-5b51-4393-82c7-d9db964edfd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.939194] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 5c0eefd2-69d4-4100-93b9-d6265c28c7be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.939303] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance c1696ee9-cb48-414c-b0a0-b6fa2e880a81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.939413] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 37a4719c-20b4-4cb3-b8fc-bfa28b906799 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.975726] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698263, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.157565] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.295049] env[63297]: DEBUG nova.network.neutron [-] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1729.322175] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e92b94c-1ddd-46cd-a065-7ef160016983 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.332550] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c227ad-0ecb-463f-9589-1f9c639ea538 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.363728] env[63297]: DEBUG nova.compute.manager [req-aa989f6e-2e37-4727-afd2-bc966271a14e req-0bbaa9f3-4e20-4b1e-9d6f-ad1afa0ab95e service nova] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Detach interface failed, port_id=6875a0ac-55bd-4388-9c3d-5105d4cf1c7a, reason: Instance 42d872d6-da12-474b-8741-1d991d507cfa could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1729.436511] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698267, 'name': ReconfigVM_Task, 'duration_secs': 0.305957} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.436774] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 37a4719c-20b4-4cb3-b8fc-bfa28b906799/37a4719c-20b4-4cb3-b8fc-bfa28b906799.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1729.437425] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c42410e8-3bcf-4227-b067-336dd0c1e85c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.442367] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 927824d3-a98b-47b4-a850-1fb15fd0fbe4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1729.442602] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1729.442749] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1729.446572] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1729.446572] env[63297]: value = "task-1698268" [ 1729.446572] env[63297]: _type = "Task" [ 1729.446572] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.455180] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698268, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.472536] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698263, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.605175] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c79cec0-1d51-43c5-94ca-3a782ee22dba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.613286] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9941b5e3-d877-4599-9e5c-8a4c4459494d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.645115] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c1155f-f5bf-43b7-9830-553a20a6dc42 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.652924] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca43606-deab-419b-8f30-dc936ef4ffdb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.667014] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1729.798197] env[63297]: INFO nova.compute.manager [-] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Took 1.28 seconds to deallocate network for instance. [ 1729.956840] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698268, 'name': Rename_Task, 'duration_secs': 0.148131} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.957186] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1729.957413] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60588123-011b-4f6a-b443-b0b60ed1b4ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.964546] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1729.964546] env[63297]: value = "task-1698269" [ 1729.964546] env[63297]: _type = "Task" [ 1729.964546] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.975670] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698263, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.978627] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698269, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.196864] env[63297]: ERROR nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [req-2312da0b-7449-451b-8f43-6e1dbb11443b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2312da0b-7449-451b-8f43-6e1dbb11443b"}]} [ 1730.214050] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1730.230315] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1730.230524] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1730.246107] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1730.272247] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1730.306947] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.452171] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5d3a81-1b8b-42b7-8e24-9b3474184270 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.461098] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb00a4f-6aeb-437a-ae4f-d7103d5c08c3 {{(pid=63297) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.496995] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ab5cab-10fa-4569-8a1d-7797127c6e63 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.508018] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698263, 'name': CreateVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.512452] env[63297]: DEBUG oslo_vmware.api [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698269, 'name': PowerOnVM_Task, 'duration_secs': 0.452218} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.512844] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1730.513092] env[63297]: INFO nova.compute.manager [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Took 8.30 seconds to spawn the instance on the hypervisor. [ 1730.516600] env[63297]: DEBUG nova.compute.manager [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1730.516600] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732233bc-9eff-43a2-9f8e-4045c8168580 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.519371] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4449ac1b-4a7b-4d14-9659-75034dfbc21c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.536114] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1730.975495] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698263, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.047853] env[63297]: INFO nova.compute.manager [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Took 20.83 seconds to build instance. [ 1731.074332] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 158 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1731.074586] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 158 to 159 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1731.074724] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1731.296744] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1731.296937] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1731.297139] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleting the datastore file [datastore1] ffaa1402-5b51-4393-82c7-d9db964edfd3 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1731.297419] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-821d6432-b79b-4e86-a46e-48303afd2540 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.304561] env[63297]: DEBUG oslo_vmware.api [None req-e0903752-c2f0-4489-892e-140e80156fb2 
tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for the task: (returnval){ [ 1731.304561] env[63297]: value = "task-1698270" [ 1731.304561] env[63297]: _type = "Task" [ 1731.304561] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.312310] env[63297]: DEBUG oslo_vmware.api [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698270, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.475992] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698263, 'name': CreateVM_Task, 'duration_secs': 4.336714} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.476179] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1731.476879] env[63297]: DEBUG oslo_concurrency.lockutils [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.477088] env[63297]: DEBUG oslo_concurrency.lockutils [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.477461] env[63297]: DEBUG oslo_concurrency.lockutils [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1731.477730] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e564a73f-8cac-45d9-ba2e-a8eff8080b62 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.482803] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1731.482803] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5265cd98-f25d-4804-7991-0f1892facd45" [ 1731.482803] env[63297]: _type = "Task" [ 1731.482803] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.490817] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5265cd98-f25d-4804-7991-0f1892facd45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.550634] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71eead68-1ab5-435b-9405-1484af94f021 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "37a4719c-20b4-4cb3-b8fc-bfa28b906799" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.348s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.581292] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1731.581569] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.684s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.581946] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.424s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.583893] env[63297]: INFO nova.compute.claims [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1731.586949] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1731.587304] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Cleaning up deleted instances {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1731.818531] env[63297]: DEBUG oslo_vmware.api [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Task: {'id': task-1698270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238063} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.818779] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1731.818977] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1731.819192] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1731.819418] env[63297]: INFO nova.compute.manager [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Took 3.58 seconds to destroy the instance on the hypervisor. [ 1731.819636] env[63297]: DEBUG oslo.service.loopingcall [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1731.819866] env[63297]: DEBUG nova.compute.manager [-] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1731.819963] env[63297]: DEBUG nova.network.neutron [-] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1731.992932] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5265cd98-f25d-4804-7991-0f1892facd45, 'name': SearchDatastore_Task, 'duration_secs': 0.009191} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.993252] env[63297]: DEBUG oslo_concurrency.lockutils [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1731.993474] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1731.993701] env[63297]: DEBUG oslo_concurrency.lockutils [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.993846] env[63297]: DEBUG oslo_concurrency.lockutils [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.994091] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1731.994364] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13bcec8f-8879-4ecb-b4e4-38a4bdea419e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.003093] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1732.003271] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1732.004020] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23d68958-11a6-45b5-9136-fe26f029f601 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.008987] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1732.008987] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520b79ff-4b1a-8efd-da23-fa0f73aeb31a" [ 1732.008987] env[63297]: _type = "Task" [ 1732.008987] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.016884] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520b79ff-4b1a-8efd-da23-fa0f73aeb31a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.103328] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] There are 47 instances to clean {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1732.103446] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 5a9b3c6d-e2e6-4d2d-a5c9-ea031e60b886] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1732.121542] env[63297]: DEBUG nova.compute.manager [req-d68ee867-d698-4e6b-8099-611f7fd33204 req-c9684e38-add9-4872-9cc2-8669d61c0bdb service nova] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Received event network-vif-deleted-dc0af285-6a18-4cb7-b669-1b9a78865789 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1732.121749] env[63297]: INFO nova.compute.manager [req-d68ee867-d698-4e6b-8099-611f7fd33204 req-c9684e38-add9-4872-9cc2-8669d61c0bdb service nova] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Neutron deleted interface dc0af285-6a18-4cb7-b669-1b9a78865789; detaching it from the instance and deleting it from the info cache [ 1732.121922] env[63297]: DEBUG nova.network.neutron [req-d68ee867-d698-4e6b-8099-611f7fd33204 req-c9684e38-add9-4872-9cc2-8669d61c0bdb service nova] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1732.519012] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520b79ff-4b1a-8efd-da23-fa0f73aeb31a, 'name': SearchDatastore_Task, 'duration_secs': 0.010551} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.519829] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58113a7a-be17-4a65-921f-a2fdb15d76d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.525628] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1732.525628] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c48864-d648-f0db-0f4e-46ae440ebf7b" [ 1732.525628] env[63297]: _type = "Task" [ 1732.525628] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.533034] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c48864-d648-f0db-0f4e-46ae440ebf7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.610307] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 314c2cd3-6f1d-4d74-ad84-d7cc44375456] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1732.612217] env[63297]: DEBUG nova.network.neutron [-] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1732.625022] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8dd5edd9-b790-4ee6-80af-063ae37df7d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.635375] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17226571-1875-499a-8e19-ec817321b532 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.669847] env[63297]: DEBUG nova.compute.manager [req-d68ee867-d698-4e6b-8099-611f7fd33204 req-c9684e38-add9-4872-9cc2-8669d61c0bdb service nova] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Detach interface failed, port_id=dc0af285-6a18-4cb7-b669-1b9a78865789, reason: Instance ffaa1402-5b51-4393-82c7-d9db964edfd3 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1732.772296] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19baccc0-d0ee-41e7-9cd4-0e729b557abc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.781761] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a0653f-000b-44db-a91f-feffcdcd3064 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.813270] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730b533d-955f-4502-9e00-b06387156012 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.821655] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4125b8a3-742d-4a6b-b7aa-e55ff2ad8c95 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.836394] env[63297]: DEBUG nova.compute.provider_tree [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1732.896018] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "37a4719c-20b4-4cb3-b8fc-bfa28b906799" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.896275] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "37a4719c-20b4-4cb3-b8fc-bfa28b906799" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.896503] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "37a4719c-20b4-4cb3-b8fc-bfa28b906799-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.896684] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock 
"37a4719c-20b4-4cb3-b8fc-bfa28b906799-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.896855] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "37a4719c-20b4-4cb3-b8fc-bfa28b906799-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.899092] env[63297]: INFO nova.compute.manager [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Terminating instance [ 1732.900901] env[63297]: DEBUG nova.compute.manager [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1732.901119] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1732.902046] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387cd5e3-b2d3-4596-9b9a-ca5eafc1228a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.910238] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1732.910520] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-604d8ac3-01a4-4244-8970-66cd6feea55c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.918635] env[63297]: DEBUG oslo_vmware.api [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1732.918635] env[63297]: value = "task-1698271" [ 1732.918635] env[63297]: _type = "Task" [ 1732.918635] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.927229] env[63297]: DEBUG oslo_vmware.api [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698271, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.036881] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c48864-d648-f0db-0f4e-46ae440ebf7b, 'name': SearchDatastore_Task, 'duration_secs': 0.019075} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.037237] env[63297]: DEBUG oslo_concurrency.lockutils [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1733.038071] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] f87867c3-58d4-4bd6-b6ef-1608ebef6b22/f87867c3-58d4-4bd6-b6ef-1608ebef6b22.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1733.038071] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-357c72cc-7f94-4473-9672-a8b01efcd5df {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.044593] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1733.044593] env[63297]: value = "task-1698272" [ 1733.044593] env[63297]: _type = "Task" [ 1733.044593] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.053835] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.114273] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: d7dc0672-c908-418e-bfcb-8daa761fba37] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1733.116750] env[63297]: INFO nova.compute.manager [-] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Took 1.30 seconds to deallocate network for instance. 
[ 1733.376409] env[63297]: DEBUG nova.scheduler.client.report [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 159 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1733.376713] env[63297]: DEBUG nova.compute.provider_tree [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 159 to 160 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1733.376895] env[63297]: DEBUG nova.compute.provider_tree [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1733.429279] env[63297]: DEBUG oslo_vmware.api [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698271, 'name': PowerOffVM_Task, 'duration_secs': 0.200219} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.429566] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1733.429734] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1733.429994] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a6db6d4-e031-4d1f-861f-86b54578565f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.521832] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1733.522422] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1733.522681] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleting the datastore file [datastore1] 37a4719c-20b4-4cb3-b8fc-bfa28b906799 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1733.523044] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a7afb08-9c86-452b-b5eb-efecf2b98ca9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.531195] env[63297]: DEBUG oslo_vmware.api [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for the task: (returnval){ [ 1733.531195] env[63297]: value = "task-1698274" [ 1733.531195] env[63297]: _type = "Task" [ 1733.531195] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.539565] env[63297]: DEBUG oslo_vmware.api [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698274, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.556251] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698272, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.618100] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 3ab25962-2150-4331-a018-aa61bd082814] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1733.626139] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.882381] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.300s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.882867] env[63297]: DEBUG nova.compute.manager [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1733.885695] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.579s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.885917] env[63297]: DEBUG nova.objects.instance [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lazy-loading 'resources' on Instance uuid 42d872d6-da12-474b-8741-1d991d507cfa {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1734.040959] env[63297]: DEBUG oslo_vmware.api [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698274, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.055923] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698272, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585958} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.055923] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] f87867c3-58d4-4bd6-b6ef-1608ebef6b22/f87867c3-58d4-4bd6-b6ef-1608ebef6b22.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1734.057121] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1734.057121] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d3f3a98-f27d-44f4-867d-6b122a1d77d7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.063058] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1734.063058] env[63297]: value = "task-1698275" [ 1734.063058] env[63297]: _type = "Task" [ 1734.063058] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.071741] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698275, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.121638] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: c89d23b7-deb0-4394-9a42-2ac3990da98d] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1734.389202] env[63297]: DEBUG nova.compute.utils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1734.394524] env[63297]: DEBUG nova.compute.manager [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1734.394524] env[63297]: DEBUG nova.network.neutron [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1734.432032] env[63297]: DEBUG nova.policy [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c21dc2049dd84f87a3b7cbcd7ba0ebcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48bfb708de5c4dd287530be2f8483ca9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1734.532098] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bb4e0b-5158-401d-a657-a95615c47541 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.543164] env[63297]: DEBUG oslo_vmware.api [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Task: {'id': task-1698274, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.621226} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.545080] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1734.545303] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1734.545492] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1734.545692] env[63297]: INFO nova.compute.manager [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Took 1.64 seconds to destroy the instance on the hypervisor. 
[ 1734.545905] env[63297]: DEBUG oslo.service.loopingcall [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1734.546172] env[63297]: DEBUG nova.compute.manager [-] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1734.546264] env[63297]: DEBUG nova.network.neutron [-] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1734.548477] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e0fa64-fce7-46cb-a780-791761645d96 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.584940] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3dbb37-15cc-46dc-a0c7-fe18178eb229 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.593845] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698275, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.219963} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.595036] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1734.595721] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb709ac-a761-4dba-ac92-e3e62ba8d1cc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.598933] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2a58db-6b93-4410-9a4c-3afa6ee13132 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.622170] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] f87867c3-58d4-4bd6-b6ef-1608ebef6b22/f87867c3-58d4-4bd6-b6ef-1608ebef6b22.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1734.629840] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39c1753a-c36c-47e8-b4c6-d2415108ec89 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1734.644396] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: c2362520-ed07-4124-aade-bb54830b0d54] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1734.646638] env[63297]: DEBUG nova.compute.provider_tree [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1734.659263] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1734.659263] env[63297]: value = "task-1698276" [ 1734.659263] env[63297]: _type = "Task" [ 1734.659263] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.666590] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698276, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.720172] env[63297]: DEBUG nova.network.neutron [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Successfully created port: 29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1734.845030] env[63297]: DEBUG nova.compute.manager [req-89e1fdaf-01c5-4622-b9c0-dbc90d627ee3 req-482da14e-2578-4612-9610-087927ba57cd service nova] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Received event network-vif-deleted-d6d4d4d5-0d71-4f02-a84c-3455168e28f2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1734.845365] env[63297]: INFO nova.compute.manager [req-89e1fdaf-01c5-4622-b9c0-dbc90d627ee3 req-482da14e-2578-4612-9610-087927ba57cd service nova] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Neutron deleted interface d6d4d4d5-0d71-4f02-a84c-3455168e28f2; detaching it from the instance and deleting it from the info cache [ 1734.845426] env[63297]: DEBUG nova.network.neutron [req-89e1fdaf-01c5-4622-b9c0-dbc90d627ee3 req-482da14e-2578-4612-9610-087927ba57cd service nova] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.895205] env[63297]: DEBUG nova.compute.manager [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1735.151142] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: d70bfe65-5faa-4248-9119-9a38259cb418] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1735.153094] env[63297]: DEBUG nova.scheduler.client.report [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1735.166660] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698276, 'name': ReconfigVM_Task, 'duration_secs': 0.283872} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.167509] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Reconfigured VM instance instance-00000065 to attach disk [datastore1] f87867c3-58d4-4bd6-b6ef-1608ebef6b22/f87867c3-58d4-4bd6-b6ef-1608ebef6b22.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1735.168634] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_secret_uuid': None, 'disk_bus': None, 'encryption_format': None, 'device_name': '/dev/sda', 'size': 0, 'encrypted': False, 'guest_format': None, 'device_type': 'disk', 'boot_index': 0, 'encryption_options': None, 'image_id': '41f1ad71-37f2-4e86-a900-da4965eba44f'}], 'ephemerals': [], 'block_device_mapping': [{'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354013', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'name': 'volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f87867c3-58d4-4bd6-b6ef-1608ebef6b22', 'attached_at': '', 'detached_at': '', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'serial': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c'}, 'mount_device': '/dev/sdb', 'disk_bus': None, 'attachment_id': '9e12dbc8-ae2b-43c2-b322-f126e9e2986b', 'guest_format': None, 'device_type': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=63297) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1735.168879] 
env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Volume attach. Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1735.169091] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354013', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'name': 'volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f87867c3-58d4-4bd6-b6ef-1608ebef6b22', 'attached_at': '', 'detached_at': '', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'serial': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1735.170101] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5db9bd4-3e8b-42a1-9d34-4408308da910 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.186851] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab085c00-2cd1-4dbb-b8a3-b2f91756bda6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.211752] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c/volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1735.212036] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9b5376e-eb3c-4a37-9ea0-6bf7581be1bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.229473] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1735.229473] env[63297]: value = "task-1698277" [ 1735.229473] env[63297]: _type = "Task" [ 1735.229473] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.238352] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698277, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.316675] env[63297]: DEBUG nova.network.neutron [-] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1735.348774] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d23f06d-1dfa-44b2-85c4-ede3091f30d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.358274] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9295d5-c57c-46e6-8c00-8f1fadd59e56 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.388131] env[63297]: DEBUG nova.compute.manager [req-89e1fdaf-01c5-4622-b9c0-dbc90d627ee3 req-482da14e-2578-4612-9610-087927ba57cd service nova] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Detach interface failed, port_id=d6d4d4d5-0d71-4f02-a84c-3455168e28f2, reason: Instance 37a4719c-20b4-4cb3-b8fc-bfa28b906799 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1735.657237] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 617544f0-fa53-415d-9f00-c8143e8e25b1] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1735.659956] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.774s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.661850] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.036s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.662275] env[63297]: DEBUG nova.objects.instance [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lazy-loading 'resources' on Instance uuid ffaa1402-5b51-4393-82c7-d9db964edfd3 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1735.683722] env[63297]: INFO nova.scheduler.client.report [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Deleted allocations for instance 42d872d6-da12-474b-8741-1d991d507cfa [ 1735.739368] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698277, 'name': ReconfigVM_Task, 'duration_secs': 0.288831} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.739638] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c/volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1735.744316] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-182a597e-f1f5-469b-9290-2be10447571d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.759123] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1735.759123] env[63297]: value = "task-1698278" [ 1735.759123] env[63297]: _type = "Task" [ 1735.759123] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.767052] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698278, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.819349] env[63297]: INFO nova.compute.manager [-] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Took 1.27 seconds to deallocate network for instance. [ 1735.905102] env[63297]: DEBUG nova.compute.manager [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1735.932233] env[63297]: DEBUG nova.virt.hardware [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1735.932525] env[63297]: DEBUG nova.virt.hardware [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1735.932680] env[63297]: DEBUG nova.virt.hardware [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1735.932855] env[63297]: DEBUG nova.virt.hardware [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1735.933009] env[63297]: DEBUG nova.virt.hardware [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1735.933167] env[63297]: DEBUG nova.virt.hardware [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1735.933378] env[63297]: DEBUG nova.virt.hardware [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1735.933546] env[63297]: DEBUG nova.virt.hardware [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1735.933763] env[63297]: DEBUG 
nova.virt.hardware [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1735.933938] env[63297]: DEBUG nova.virt.hardware [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1735.934125] env[63297]: DEBUG nova.virt.hardware [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1735.934995] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0feb096-94b7-4061-af14-8014cb5c71a0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.943204] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8edbc2f0-95f3-4065-aab9-48a48112b450 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.162787] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 1110d6ca-ca5f-44d1-baca-c22c8fc166b5] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1736.183839] env[63297]: DEBUG nova.compute.manager [req-310d661b-a7b1-4edf-b6a5-d30f268c9550 req-3b731021-e8ac-4858-8459-ad82987ab2aa service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Received event network-vif-plugged-29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1736.183955] env[63297]: DEBUG oslo_concurrency.lockutils [req-310d661b-a7b1-4edf-b6a5-d30f268c9550 req-3b731021-e8ac-4858-8459-ad82987ab2aa service nova] Acquiring lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.184227] env[63297]: DEBUG oslo_concurrency.lockutils [req-310d661b-a7b1-4edf-b6a5-d30f268c9550 req-3b731021-e8ac-4858-8459-ad82987ab2aa service nova] Lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.184402] env[63297]: DEBUG oslo_concurrency.lockutils [req-310d661b-a7b1-4edf-b6a5-d30f268c9550 req-3b731021-e8ac-4858-8459-ad82987ab2aa service nova] Lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.184573] env[63297]: DEBUG nova.compute.manager [req-310d661b-a7b1-4edf-b6a5-d30f268c9550 req-3b731021-e8ac-4858-8459-ad82987ab2aa service 
nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] No waiting events found dispatching network-vif-plugged-29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1736.184740] env[63297]: WARNING nova.compute.manager [req-310d661b-a7b1-4edf-b6a5-d30f268c9550 req-3b731021-e8ac-4858-8459-ad82987ab2aa service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Received unexpected event network-vif-plugged-29d488c8-0537-45c6-97ba-b70aec11141b for instance with vm_state building and task_state spawning. [ 1736.192536] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d2cf74b-2a5e-46b9-85a8-bef7444dd672 tempest-ServerRescueNegativeTestJSON-169332532 tempest-ServerRescueNegativeTestJSON-169332532-project-member] Lock "42d872d6-da12-474b-8741-1d991d507cfa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.816s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.270140] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698278, 'name': ReconfigVM_Task, 'duration_secs': 0.150308} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.270479] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354013', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'name': 'volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f87867c3-58d4-4bd6-b6ef-1608ebef6b22', 'attached_at': '', 'detached_at': '', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'serial': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1736.272732] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e9a2078-08f7-433a-babe-301dd0db7a2c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.279422] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1736.279422] env[63297]: value = "task-1698279" [ 1736.279422] env[63297]: _type = "Task" [ 1736.279422] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.281855] env[63297]: DEBUG nova.network.neutron [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Successfully updated port: 29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1736.290955] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698279, 'name': Rename_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.321043] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6129e565-4943-4727-9dda-562acfcc7cca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.326627] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.330198] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-524eaaa5-623a-4cb0-b625-6331a52dbcc1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.361514] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe614f4-98c0-4a13-832b-f637f5d8b15b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.369525] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263fe0b9-6d1f-465a-b0e0-38e03c061264 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.383296] env[63297]: DEBUG nova.compute.provider_tree [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1736.668583] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: fd178a30-b5f0-4019-a05f-f1928e1d122a] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1736.788959] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.789194] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 
tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.789414] env[63297]: DEBUG nova.network.neutron [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1736.790649] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698279, 'name': Rename_Task, 'duration_secs': 0.156052} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.791233] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1736.791507] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad4d673d-a15e-4708-8922-afdd323733bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.798891] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1736.798891] env[63297]: value = "task-1698280" [ 1736.798891] env[63297]: _type = "Task" [ 1736.798891] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.807676] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698280, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.886521] env[63297]: DEBUG nova.scheduler.client.report [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1737.173836] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: a6d86e78-ae24-4e70-9fb2-270177b40322] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1737.316880] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698280, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.325127] env[63297]: DEBUG nova.network.neutron [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1737.392081] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.730s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.394240] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.068s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.394492] env[63297]: DEBUG nova.objects.instance [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lazy-loading 'resources' on Instance uuid 37a4719c-20b4-4cb3-b8fc-bfa28b906799 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1737.419773] env[63297]: INFO nova.scheduler.client.report [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Deleted allocations for instance ffaa1402-5b51-4393-82c7-d9db964edfd3 [ 1737.539017] env[63297]: DEBUG nova.network.neutron [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updating instance_info_cache with network_info: [{"id": "29d488c8-0537-45c6-97ba-b70aec11141b", "address": "fa:16:3e:d4:87:f6", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29d488c8-05", "ovs_interfaceid": "29d488c8-0537-45c6-97ba-b70aec11141b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.702013] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: f9ad9854-2f5b-4edd-9636-8d36d0a89e89] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1737.813872] env[63297]: DEBUG oslo_vmware.api [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad 
tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698280, 'name': PowerOnVM_Task, 'duration_secs': 0.573189} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.814213] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1737.814500] env[63297]: DEBUG nova.compute.manager [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1737.815573] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e327e350-f038-4213-9bac-cdbfc170a7d9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.935682] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e0903752-c2f0-4489-892e-140e80156fb2 tempest-DeleteServersTestJSON-708693933 tempest-DeleteServersTestJSON-708693933-project-member] Lock "ffaa1402-5b51-4393-82c7-d9db964edfd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.700s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.035743] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3647af-393a-4f47-82d6-4b655791512c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.041455] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.041763] env[63297]: DEBUG nova.compute.manager [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Instance network_info: |[{"id": "29d488c8-0537-45c6-97ba-b70aec11141b", "address": "fa:16:3e:d4:87:f6", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", 
"segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29d488c8-05", "ovs_interfaceid": "29d488c8-0537-45c6-97ba-b70aec11141b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1738.043601] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:87:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29d488c8-0537-45c6-97ba-b70aec11141b', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1738.051008] env[63297]: DEBUG oslo.service.loopingcall [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1738.051280] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1738.052226] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db83baf-ea54-4cb2-b299-e856c6c53105 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.055521] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4107824f-b935-4949-81cc-6b9d1ad4b464 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.099806] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71bd6a7-0ed4-485f-8f26-9a06b125a6ad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.102375] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1738.102375] env[63297]: value = "task-1698282" [ 1738.102375] env[63297]: _type = "Task" [ 1738.102375] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.109152] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece0d207-13fd-4696-a14b-e5ce0a012152 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.115787] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698282, 'name': CreateVM_Task} progress is 15%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.125273] env[63297]: DEBUG nova.compute.provider_tree [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1738.180203] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: cc644ecc-7340-421c-b966-19145eb82949] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1738.212566] env[63297]: DEBUG nova.compute.manager [req-1ffd9fce-19af-46e0-b2ac-3b4351d14ea1 req-cfc8f7ce-093b-4955-96da-5afdbc2a4e1e service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Received event network-changed-29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1738.212764] env[63297]: DEBUG nova.compute.manager [req-1ffd9fce-19af-46e0-b2ac-3b4351d14ea1 req-cfc8f7ce-093b-4955-96da-5afdbc2a4e1e service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Refreshing instance network info cache due to event network-changed-29d488c8-0537-45c6-97ba-b70aec11141b. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1738.212982] env[63297]: DEBUG oslo_concurrency.lockutils [req-1ffd9fce-19af-46e0-b2ac-3b4351d14ea1 req-cfc8f7ce-093b-4955-96da-5afdbc2a4e1e service nova] Acquiring lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.213142] env[63297]: DEBUG oslo_concurrency.lockutils [req-1ffd9fce-19af-46e0-b2ac-3b4351d14ea1 req-cfc8f7ce-093b-4955-96da-5afdbc2a4e1e service nova] Acquired lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.213307] env[63297]: DEBUG nova.network.neutron [req-1ffd9fce-19af-46e0-b2ac-3b4351d14ea1 req-cfc8f7ce-093b-4955-96da-5afdbc2a4e1e service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Refreshing network info cache for port 29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1738.335160] env[63297]: DEBUG oslo_concurrency.lockutils [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.613499] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698282, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.628821] env[63297]: DEBUG nova.scheduler.client.report [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1738.683325] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 6c1aa85a-ee37-461b-ad8a-7fbb525e836e] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1739.000892] env[63297]: DEBUG nova.network.neutron [req-1ffd9fce-19af-46e0-b2ac-3b4351d14ea1 req-cfc8f7ce-093b-4955-96da-5afdbc2a4e1e service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updated VIF entry in instance network info cache for port 29d488c8-0537-45c6-97ba-b70aec11141b. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1739.002376] env[63297]: DEBUG nova.network.neutron [req-1ffd9fce-19af-46e0-b2ac-3b4351d14ea1 req-cfc8f7ce-093b-4955-96da-5afdbc2a4e1e service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updating instance_info_cache with network_info: [{"id": "29d488c8-0537-45c6-97ba-b70aec11141b", "address": "fa:16:3e:d4:87:f6", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29d488c8-05", "ovs_interfaceid": "29d488c8-0537-45c6-97ba-b70aec11141b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.114533] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698282, 'name': CreateVM_Task, 'duration_secs': 0.58938} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.114719] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1739.115387] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.115569] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.115894] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1739.116158] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23b05afc-b610-445e-9d26-58f0588a4e07 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.120820] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1739.120820] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]525f33cd-3fd3-69d5-a987-c37c51fc9a8e" [ 1739.120820] env[63297]: _type = "Task" [ 1739.120820] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.128074] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525f33cd-3fd3-69d5-a987-c37c51fc9a8e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.133817] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.740s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.135860] env[63297]: DEBUG oslo_concurrency.lockutils [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.801s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.136068] env[63297]: DEBUG nova.objects.instance [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63297) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1739.154885] env[63297]: INFO nova.scheduler.client.report [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Deleted allocations for instance 37a4719c-20b4-4cb3-b8fc-bfa28b906799 [ 1739.187759] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 1d8c6df5-069f-4647-a2f6-e69a4bf8be94] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1739.316373] env[63297]: DEBUG oslo_concurrency.lockutils [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.316645] env[63297]: DEBUG oslo_concurrency.lockutils [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.505943] env[63297]: DEBUG oslo_concurrency.lockutils [req-1ffd9fce-19af-46e0-b2ac-3b4351d14ea1 req-cfc8f7ce-093b-4955-96da-5afdbc2a4e1e service nova] Releasing lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.632714] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525f33cd-3fd3-69d5-a987-c37c51fc9a8e, 'name': SearchDatastore_Task, 'duration_secs': 0.02991} completed 
successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.633016] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.633255] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1739.633490] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.633641] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.633819] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1739.634098] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2691456b-1a20-4615-8264-3d8b247f053c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.646832] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1739.647037] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1739.647795] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f92f339f-767c-4072-a8d2-1ad9f8fe1ed0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.656310] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1739.656310] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52398b7b-832f-09d8-1a04-b1b2909e22e6" [ 1739.656310] env[63297]: _type = "Task" [ 1739.656310] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.662624] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2ed6bf15-5aa1-4373-9177-a1ae60eeac35 tempest-ServerDiskConfigTestJSON-1661101796 tempest-ServerDiskConfigTestJSON-1661101796-project-member] Lock "37a4719c-20b4-4cb3-b8fc-bfa28b906799" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.766s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.667109] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52398b7b-832f-09d8-1a04-b1b2909e22e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.690941] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: a0f4160e-cfb4-4d1d-bbee-6df44eb363fb] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1739.819591] env[63297]: INFO nova.compute.manager [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Detaching volume 3a1d595a-6c94-4766-b9a2-b46ebcc6b92c [ 1739.850362] env[63297]: INFO nova.virt.block_device [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Attempting to driver detach volume 3a1d595a-6c94-4766-b9a2-b46ebcc6b92c from mountpoint /dev/sdb [ 1739.850672] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Volume detach. 
Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1739.850911] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354013', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'name': 'volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f87867c3-58d4-4bd6-b6ef-1608ebef6b22', 'attached_at': '', 'detached_at': '', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'serial': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1739.851946] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ec36bb-8cc7-4494-a3f4-d4597d2a19c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.875730] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1ced39-defd-47ec-8132-4cd93adadf41 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.881876] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a784f79-b125-4fa8-8922-02a8f8d5a3da {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.903151] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee1c773-09f0-4555-9936-f2b3265556c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.918526] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] The volume has not been displaced from its original location: [datastore1] volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c/volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c.vmdk. No consolidation needed. 
{{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1739.924306] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1739.924637] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c204feb8-1701-4c5d-a2d3-a10f8ff90b42 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.943318] env[63297]: DEBUG oslo_vmware.api [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1739.943318] env[63297]: value = "task-1698283" [ 1739.943318] env[63297]: _type = "Task" [ 1739.943318] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.951872] env[63297]: DEBUG oslo_vmware.api [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698283, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.145455] env[63297]: DEBUG oslo_concurrency.lockutils [None req-340c698c-24e5-48ed-8f53-1a4d865c9bad tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.167966] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52398b7b-832f-09d8-1a04-b1b2909e22e6, 'name': SearchDatastore_Task, 'duration_secs': 0.022062} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.168842] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac210fac-6ec7-49f7-9f34-e9b171fb19e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.174625] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1740.174625] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52553dd6-16ba-34e3-1b6a-7aa31a75269c" [ 1740.174625] env[63297]: _type = "Task" [ 1740.174625] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.184176] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52553dd6-16ba-34e3-1b6a-7aa31a75269c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.194727] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 310cf8d4-613a-4c35-b118-7d79138e4799] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1740.453380] env[63297]: DEBUG oslo_vmware.api [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698283, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.684377] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52553dd6-16ba-34e3-1b6a-7aa31a75269c, 'name': SearchDatastore_Task, 'duration_secs': 0.009522} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.684637] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.684895] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 927824d3-a98b-47b4-a850-1fb15fd0fbe4/927824d3-a98b-47b4-a850-1fb15fd0fbe4.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1740.685154] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01878888-0f87-48f8-a1e5-08063c3b718c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.691608] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1740.691608] env[63297]: value = "task-1698284" [ 1740.691608] env[63297]: _type = "Task" [ 1740.691608] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.699240] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: a10df0e9-4278-48f1-b111-864ac793f630] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1740.700999] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698284, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.955826] env[63297]: DEBUG oslo_vmware.api [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698283, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.203800] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: f5866b1e-cd77-464e-858e-eb14dab0637c] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1741.205781] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698284, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.432076} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.206045] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 927824d3-a98b-47b4-a850-1fb15fd0fbe4/927824d3-a98b-47b4-a850-1fb15fd0fbe4.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1741.206259] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1741.206504] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6399fb8-2211-446b-934e-e7f44845305d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.214663] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1741.214663] env[63297]: value = "task-1698285" [ 1741.214663] env[63297]: _type = "Task" [ 1741.214663] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.222610] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698285, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.456136] env[63297]: DEBUG oslo_vmware.api [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698283, 'name': ReconfigVM_Task, 'duration_secs': 1.235624} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.456376] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1741.460992] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adcfc9f6-33f2-43f5-9b10-50493ad50bb6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.475477] env[63297]: DEBUG oslo_vmware.api [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1741.475477] env[63297]: value = "task-1698286" [ 1741.475477] env[63297]: _type = "Task" [ 1741.475477] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.484469] env[63297]: DEBUG oslo_vmware.api [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698286, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.710174] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: be532612-7192-4771-a3dc-25bd1dc6be6b] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1741.724931] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698285, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.358743} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.725644] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1741.727441] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f5c148-aff4-4c52-927b-c6066ba9d7bd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.751486] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 927824d3-a98b-47b4-a850-1fb15fd0fbe4/927824d3-a98b-47b4-a850-1fb15fd0fbe4.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1741.752650] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb67b973-19bb-4d79-a292-93f90e213b72 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.774966] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1741.774966] env[63297]: value = "task-1698287" [ 1741.774966] env[63297]: _type = "Task" [ 1741.774966] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.784808] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698287, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.987025] env[63297]: DEBUG oslo_vmware.api [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698286, 'name': ReconfigVM_Task, 'duration_secs': 0.244187} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.987554] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354013', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'name': 'volume-3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f87867c3-58d4-4bd6-b6ef-1608ebef6b22', 'attached_at': '', 'detached_at': '', 'volume_id': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c', 'serial': '3a1d595a-6c94-4766-b9a2-b46ebcc6b92c'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1742.213820] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 0b7b9f1b-d277-4219-92fb-e35a8b867e77] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1742.286670] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698287, 'name': ReconfigVM_Task, 'duration_secs': 0.31367} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.287061] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 927824d3-a98b-47b4-a850-1fb15fd0fbe4/927824d3-a98b-47b4-a850-1fb15fd0fbe4.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1742.287720] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57da19ef-cadc-48ca-b80d-1df69650f1f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.294870] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1742.294870] env[63297]: value = "task-1698288" [ 1742.294870] env[63297]: _type = "Task" [ 1742.294870] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.304741] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698288, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.541015] env[63297]: DEBUG nova.objects.instance [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lazy-loading 'flavor' on Instance uuid f87867c3-58d4-4bd6-b6ef-1608ebef6b22 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1742.717748] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 57d93827-2a5a-4f12-a74b-147a1a934dd1] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1742.805662] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698288, 'name': Rename_Task, 'duration_secs': 0.499049} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.805963] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1742.806252] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8842258-fabb-4584-918d-b463ccddfcb8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.812349] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1742.812349] env[63297]: value = "task-1698289" [ 1742.812349] env[63297]: _type = "Task" [ 1742.812349] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.820715] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698289, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.222282] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 5cdb44c7-3dc1-4bce-8864-a1a40150e730] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1743.323150] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698289, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.549199] env[63297]: DEBUG oslo_concurrency.lockutils [None req-45f3bf92-199a-4377-b8b9-3011ceb0cc28 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.232s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.726372] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 5a868570-7504-4262-80b2-a458c219e689] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1743.823353] env[63297]: DEBUG oslo_vmware.api [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698289, 'name': PowerOnVM_Task, 'duration_secs': 0.542066} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.823751] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1743.824048] env[63297]: INFO nova.compute.manager [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Took 7.92 seconds to spawn the instance on the hypervisor. [ 1743.824249] env[63297]: DEBUG nova.compute.manager [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1743.825058] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a26c45e-d074-46ed-9331-26f03359b7ca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.230610] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 44f4776e-d4a1-40ad-a03b-bb03582b95bd] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1744.341236] env[63297]: INFO nova.compute.manager [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Took 15.20 seconds to build instance. 
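The build traced just above follows the same vCenter task pattern throughout: each reconfigure, rename, and power-on call returns a Task managed object, and the service polls it ("progress is N%") until it reports success, at which point the "completed successfully" line is emitted. Below is a minimal sketch of that poll loop in plain Python; the session object, its get_task_info accessor, and the poll interval are assumptions for illustration only, not the oslo.vmware wait_for_task implementation referenced in the log.

    import time

    class TaskFailed(Exception):
        """Raised when a polled task ends in the error state."""

    def wait_for_task(session, task_ref, poll_interval=0.5):
        """Poll task_ref until it finishes and return its result.

        Mirrors the "Waiting for the task ... to complete" and
        "Task: {...} progress is N%" lines above; the real loop lives in
        oslo.vmware (api.py, wait_for_task / _poll_task), not here.
        """
        while True:
            info = session.get_task_info(task_ref)   # hypothetical accessor
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise TaskFailed(info.error)
            # log-equivalent of: Task: {'id': ...} progress is N%.
            print("Task %s progress is %s%%" % (task_ref, info.progress))
            time.sleep(poll_interval)

In the trace above this loop runs back to back for task-1698287 (ReconfigVM_Task), task-1698288 (Rename_Task) and task-1698289 (PowerOnVM_Task), each opened by an "Invoking ..." line and closed by a "completed successfully" line.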
[ 1744.667311] env[63297]: DEBUG oslo_concurrency.lockutils [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.667511] env[63297]: DEBUG oslo_concurrency.lockutils [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.667731] env[63297]: DEBUG oslo_concurrency.lockutils [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.667946] env[63297]: DEBUG oslo_concurrency.lockutils [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.668160] env[63297]: DEBUG oslo_concurrency.lockutils [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.670262] env[63297]: INFO nova.compute.manager [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Terminating instance [ 1744.672168] env[63297]: DEBUG nova.compute.manager [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1744.672363] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1744.673213] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2bde44-f966-4ce8-b8b5-c72a7348709b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.684852] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1744.684852] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-586dfad2-f7e8-45e9-bb82-9963d7456e3b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.688026] env[63297]: DEBUG oslo_vmware.api [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1744.688026] env[63297]: value = "task-1698290" [ 1744.688026] env[63297]: _type = "Task" [ 1744.688026] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.697690] env[63297]: DEBUG oslo_vmware.api [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698290, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.734449] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: b5ef504b-1b68-4fa9-bfa1-b127ed8bcc6a] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1744.844055] env[63297]: DEBUG oslo_concurrency.lockutils [None req-1047418d-81c9-40cb-93b9-b4d60ea82af7 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.709s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.196621] env[63297]: DEBUG nova.compute.manager [req-584e2bcb-28db-4553-851e-6be071cdb962 req-85512c75-3402-466c-b6d9-42263d504d00 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Received event network-changed-da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1745.196831] env[63297]: DEBUG nova.compute.manager [req-584e2bcb-28db-4553-851e-6be071cdb962 req-85512c75-3402-466c-b6d9-42263d504d00 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Refreshing instance network info cache due to event network-changed-da1879c7-6003-4193-ab1c-019184bded8d. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1745.197092] env[63297]: DEBUG oslo_concurrency.lockutils [req-584e2bcb-28db-4553-851e-6be071cdb962 req-85512c75-3402-466c-b6d9-42263d504d00 service nova] Acquiring lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1745.197262] env[63297]: DEBUG oslo_concurrency.lockutils [req-584e2bcb-28db-4553-851e-6be071cdb962 req-85512c75-3402-466c-b6d9-42263d504d00 service nova] Acquired lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1745.197796] env[63297]: DEBUG nova.network.neutron [req-584e2bcb-28db-4553-851e-6be071cdb962 req-85512c75-3402-466c-b6d9-42263d504d00 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Refreshing network info cache for port da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1745.208806] env[63297]: DEBUG oslo_vmware.api [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698290, 'name': PowerOffVM_Task, 'duration_secs': 0.23963} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.208806] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1745.208806] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1745.209169] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-97ff3627-fadc-4ec0-bcfb-95158e60e4af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.237869] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 4701073f-eeee-4f37-919a-4c53663ac15f] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1745.284907] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1745.285150] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1745.285384] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleting the datastore file [datastore1] f87867c3-58d4-4bd6-b6ef-1608ebef6b22 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1745.285663] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f388da3-b637-4213-8086-92f2d65c321b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.291958] env[63297]: DEBUG oslo_vmware.api [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1745.291958] env[63297]: value = "task-1698292" [ 1745.291958] env[63297]: _type = "Task" [ 1745.291958] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.300597] env[63297]: DEBUG oslo_vmware.api [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698292, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.742184] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: fc54a008-eb2e-4b10-86ea-be7c82b93139] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1745.802386] env[63297]: DEBUG oslo_vmware.api [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698292, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131941} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.802615] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1745.802765] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1745.802946] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1745.803146] env[63297]: INFO nova.compute.manager [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1745.803391] env[63297]: DEBUG oslo.service.loopingcall [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1745.803578] env[63297]: DEBUG nova.compute.manager [-] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1745.803673] env[63297]: DEBUG nova.network.neutron [-] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1746.091378] env[63297]: DEBUG oslo_concurrency.lockutils [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.091624] env[63297]: DEBUG oslo_concurrency.lockutils [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.245602] env[63297]: DEBUG nova.network.neutron [req-584e2bcb-28db-4553-851e-6be071cdb962 req-85512c75-3402-466c-b6d9-42263d504d00 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updated VIF entry in instance network info cache for port da1879c7-6003-4193-ab1c-019184bded8d. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1746.246306] env[63297]: DEBUG nova.network.neutron [req-584e2bcb-28db-4553-851e-6be071cdb962 req-85512c75-3402-466c-b6d9-42263d504d00 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updating instance_info_cache with network_info: [{"id": "da1879c7-6003-4193-ab1c-019184bded8d", "address": "fa:16:3e:c8:fa:05", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda1879c7-60", "ovs_interfaceid": "da1879c7-6003-4193-ab1c-019184bded8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.247326] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 5914b3ce-f40f-4782-b56a-9fc29c819938] Instance has had 0 of 5 cleanup 
attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1746.285703] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.285703] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.597878] env[63297]: DEBUG nova.compute.utils [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1746.663380] env[63297]: DEBUG nova.compute.manager [req-6d9c6552-274e-46b2-bbe6-5fe5cf4037dd req-7c58c018-6eaa-45f6-b1f8-1c77c7c568c4 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Received event network-changed-29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1746.663505] env[63297]: DEBUG nova.compute.manager [req-6d9c6552-274e-46b2-bbe6-5fe5cf4037dd req-7c58c018-6eaa-45f6-b1f8-1c77c7c568c4 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Refreshing instance network info cache due to event network-changed-29d488c8-0537-45c6-97ba-b70aec11141b. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1746.663673] env[63297]: DEBUG oslo_concurrency.lockutils [req-6d9c6552-274e-46b2-bbe6-5fe5cf4037dd req-7c58c018-6eaa-45f6-b1f8-1c77c7c568c4 service nova] Acquiring lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1746.663748] env[63297]: DEBUG oslo_concurrency.lockutils [req-6d9c6552-274e-46b2-bbe6-5fe5cf4037dd req-7c58c018-6eaa-45f6-b1f8-1c77c7c568c4 service nova] Acquired lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1746.663931] env[63297]: DEBUG nova.network.neutron [req-6d9c6552-274e-46b2-bbe6-5fe5cf4037dd req-7c58c018-6eaa-45f6-b1f8-1c77c7c568c4 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Refreshing network info cache for port 29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1746.751614] env[63297]: DEBUG oslo_concurrency.lockutils [req-584e2bcb-28db-4553-851e-6be071cdb962 req-85512c75-3402-466c-b6d9-42263d504d00 service nova] Releasing lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1746.752136] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 66b7a1e5-5e74-49db-99f3-4427d7297bf2] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1746.773976] env[63297]: INFO nova.compute.manager [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Detaching volume 4890cca6-2688-4f8a-89b5-29f10002cfe8 [ 1746.799912] env[63297]: DEBUG nova.network.neutron [-] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.811342] env[63297]: INFO nova.virt.block_device [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Attempting to driver detach volume 4890cca6-2688-4f8a-89b5-29f10002cfe8 from mountpoint /dev/sdb [ 1746.811342] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Volume detach. 
Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1746.811342] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354007', 'volume_id': '4890cca6-2688-4f8a-89b5-29f10002cfe8', 'name': 'volume-4890cca6-2688-4f8a-89b5-29f10002cfe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '427c4ff0-1bf1-4bfb-b5c6-de6659148ab1', 'attached_at': '', 'detached_at': '', 'volume_id': '4890cca6-2688-4f8a-89b5-29f10002cfe8', 'serial': '4890cca6-2688-4f8a-89b5-29f10002cfe8'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1746.811601] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59acf50-1926-494d-89f3-8d88c365b1b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.841182] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fec8d35-2be2-4c8a-8ac2-41a450d7afc6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.848160] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ba8512-0a56-49e4-9637-4347125d6116 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.870743] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85c4ba2-77c8-44de-a1d8-d474a662295e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.887305] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] The volume has not been displaced from its original location: [datastore1] volume-4890cca6-2688-4f8a-89b5-29f10002cfe8/volume-4890cca6-2688-4f8a-89b5-29f10002cfe8.vmdk. No consolidation needed. 
{{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1746.892716] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Reconfiguring VM instance instance-0000005e to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1746.893129] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-283228be-1ec6-45b8-99ea-1c65f62393ef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.912143] env[63297]: DEBUG oslo_vmware.api [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1746.912143] env[63297]: value = "task-1698293" [ 1746.912143] env[63297]: _type = "Task" [ 1746.912143] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.922691] env[63297]: DEBUG oslo_vmware.api [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698293, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.100605] env[63297]: DEBUG oslo_concurrency.lockutils [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.225453] env[63297]: DEBUG nova.compute.manager [req-daa78209-671a-4e44-975a-71c8bf11bb8d req-e2779aa6-d0a1-4823-9441-2d662f075760 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Received event network-changed-29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1747.225562] env[63297]: DEBUG nova.compute.manager [req-daa78209-671a-4e44-975a-71c8bf11bb8d req-e2779aa6-d0a1-4823-9441-2d662f075760 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Refreshing instance network info cache due to event network-changed-29d488c8-0537-45c6-97ba-b70aec11141b. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1747.225733] env[63297]: DEBUG oslo_concurrency.lockutils [req-daa78209-671a-4e44-975a-71c8bf11bb8d req-e2779aa6-d0a1-4823-9441-2d662f075760 service nova] Acquiring lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1747.255513] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: ac112251-8cc3-4f57-8983-8a07e2a068f8] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1747.303162] env[63297]: INFO nova.compute.manager [-] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Took 1.50 seconds to deallocate network for instance. [ 1747.359844] env[63297]: DEBUG nova.network.neutron [req-6d9c6552-274e-46b2-bbe6-5fe5cf4037dd req-7c58c018-6eaa-45f6-b1f8-1c77c7c568c4 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updated VIF entry in instance network info cache for port 29d488c8-0537-45c6-97ba-b70aec11141b. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1747.360245] env[63297]: DEBUG nova.network.neutron [req-6d9c6552-274e-46b2-bbe6-5fe5cf4037dd req-7c58c018-6eaa-45f6-b1f8-1c77c7c568c4 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updating instance_info_cache with network_info: [{"id": "29d488c8-0537-45c6-97ba-b70aec11141b", "address": "fa:16:3e:d4:87:f6", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29d488c8-05", "ovs_interfaceid": "29d488c8-0537-45c6-97ba-b70aec11141b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1747.422284] env[63297]: DEBUG oslo_vmware.api [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698293, 'name': ReconfigVM_Task, 'duration_secs': 0.241997} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.422558] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Reconfigured VM instance instance-0000005e to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1747.427343] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b82eb2cb-daa1-4945-b405-821c0ce73f72 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.442330] env[63297]: DEBUG oslo_vmware.api [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1747.442330] env[63297]: value = "task-1698294" [ 1747.442330] env[63297]: _type = "Task" [ 1747.442330] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.449753] env[63297]: DEBUG oslo_vmware.api [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698294, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.759086] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: efaa465d-f6b2-4891-8e96-b4c3af052759] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1747.808013] env[63297]: DEBUG oslo_concurrency.lockutils [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.808293] env[63297]: DEBUG oslo_concurrency.lockutils [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.808514] env[63297]: DEBUG nova.objects.instance [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lazy-loading 'resources' on Instance uuid f87867c3-58d4-4bd6-b6ef-1608ebef6b22 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1747.862913] env[63297]: DEBUG oslo_concurrency.lockutils [req-6d9c6552-274e-46b2-bbe6-5fe5cf4037dd req-7c58c018-6eaa-45f6-b1f8-1c77c7c568c4 service nova] Releasing lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1747.863399] env[63297]: DEBUG oslo_concurrency.lockutils [req-daa78209-671a-4e44-975a-71c8bf11bb8d 
req-e2779aa6-d0a1-4823-9441-2d662f075760 service nova] Acquired lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.863613] env[63297]: DEBUG nova.network.neutron [req-daa78209-671a-4e44-975a-71c8bf11bb8d req-e2779aa6-d0a1-4823-9441-2d662f075760 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Refreshing network info cache for port 29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1747.952523] env[63297]: DEBUG oslo_vmware.api [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698294, 'name': ReconfigVM_Task, 'duration_secs': 0.143653} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.952814] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354007', 'volume_id': '4890cca6-2688-4f8a-89b5-29f10002cfe8', 'name': 'volume-4890cca6-2688-4f8a-89b5-29f10002cfe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '427c4ff0-1bf1-4bfb-b5c6-de6659148ab1', 'attached_at': '', 'detached_at': '', 'volume_id': '4890cca6-2688-4f8a-89b5-29f10002cfe8', 'serial': '4890cca6-2688-4f8a-89b5-29f10002cfe8'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1748.161652] env[63297]: DEBUG oslo_concurrency.lockutils [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.161930] env[63297]: DEBUG oslo_concurrency.lockutils [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.162190] env[63297]: INFO nova.compute.manager [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Attaching volume 0fa97388-d5df-41d2-befe-de9bd7aac345 to /dev/sdb [ 1748.193282] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dee45e6-7fea-4e71-9537-7d592c4f30a8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.200834] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deaa0a0f-00ad-4d87-9d96-995f5a01694d {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.215059] env[63297]: DEBUG nova.virt.block_device [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating existing volume attachment record: e3e22fe9-b1e9-4d57-a398-65452073ec69 {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1748.262701] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 71faf167-dfe3-4792-9841-b5ab4b333884] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1748.412172] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de62d27c-49a3-481e-b173-e49de9a3d3f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.418910] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f9e87c-bbd7-4a21-b472-0e49e1d1b281 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.450245] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c307edd8-d4fc-465b-967e-f560c263c0f2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.456972] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8322df39-eb60-4838-ba04-0e75e2c2f320 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.469794] env[63297]: DEBUG nova.compute.provider_tree [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1748.496497] env[63297]: DEBUG nova.objects.instance [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lazy-loading 'flavor' on Instance uuid 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1748.589989] env[63297]: DEBUG nova.network.neutron [req-daa78209-671a-4e44-975a-71c8bf11bb8d req-e2779aa6-d0a1-4823-9441-2d662f075760 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updated VIF entry in instance network info cache for port 29d488c8-0537-45c6-97ba-b70aec11141b. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1748.590396] env[63297]: DEBUG nova.network.neutron [req-daa78209-671a-4e44-975a-71c8bf11bb8d req-e2779aa6-d0a1-4823-9441-2d662f075760 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updating instance_info_cache with network_info: [{"id": "29d488c8-0537-45c6-97ba-b70aec11141b", "address": "fa:16:3e:d4:87:f6", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29d488c8-05", "ovs_interfaceid": "29d488c8-0537-45c6-97ba-b70aec11141b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1748.689173] env[63297]: DEBUG nova.compute.manager [req-96b39b78-ce83-41cb-8965-b0daea55893b req-9faa1e31-d4bb-45a8-b748-70cf81e0ac3a service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Received event network-changed-da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1748.689444] env[63297]: DEBUG nova.compute.manager [req-96b39b78-ce83-41cb-8965-b0daea55893b req-9faa1e31-d4bb-45a8-b748-70cf81e0ac3a service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Refreshing instance network info cache due to event network-changed-da1879c7-6003-4193-ab1c-019184bded8d. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1748.689665] env[63297]: DEBUG oslo_concurrency.lockutils [req-96b39b78-ce83-41cb-8965-b0daea55893b req-9faa1e31-d4bb-45a8-b748-70cf81e0ac3a service nova] Acquiring lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1748.689857] env[63297]: DEBUG oslo_concurrency.lockutils [req-96b39b78-ce83-41cb-8965-b0daea55893b req-9faa1e31-d4bb-45a8-b748-70cf81e0ac3a service nova] Acquired lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1748.690091] env[63297]: DEBUG nova.network.neutron [req-96b39b78-ce83-41cb-8965-b0daea55893b req-9faa1e31-d4bb-45a8-b748-70cf81e0ac3a service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Refreshing network info cache for port da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1748.766620] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 96265295-6b0c-4803-bb89-6166c9d3fc7f] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1748.974400] env[63297]: DEBUG nova.scheduler.client.report [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1749.093231] env[63297]: DEBUG oslo_concurrency.lockutils [req-daa78209-671a-4e44-975a-71c8bf11bb8d req-e2779aa6-d0a1-4823-9441-2d662f075760 service nova] Releasing lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1749.093440] env[63297]: DEBUG nova.compute.manager [req-daa78209-671a-4e44-975a-71c8bf11bb8d req-e2779aa6-d0a1-4823-9441-2d662f075760 service nova] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Received event network-vif-deleted-8b0d7883-16da-4bdb-b728-dbcd6772ccdb {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1749.269519] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 89c9cd40-585e-4ae6-88b3-1a33a94c3b52] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1749.391470] env[63297]: DEBUG nova.network.neutron [req-96b39b78-ce83-41cb-8965-b0daea55893b req-9faa1e31-d4bb-45a8-b748-70cf81e0ac3a service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updated VIF entry in instance network info cache for port da1879c7-6003-4193-ab1c-019184bded8d. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1749.391932] env[63297]: DEBUG nova.network.neutron [req-96b39b78-ce83-41cb-8965-b0daea55893b req-9faa1e31-d4bb-45a8-b748-70cf81e0ac3a service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updating instance_info_cache with network_info: [{"id": "da1879c7-6003-4193-ab1c-019184bded8d", "address": "fa:16:3e:c8:fa:05", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda1879c7-60", "ovs_interfaceid": "da1879c7-6003-4193-ab1c-019184bded8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1749.478966] env[63297]: DEBUG oslo_concurrency.lockutils [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.498510] env[63297]: INFO nova.scheduler.client.report [None req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted allocations for instance f87867c3-58d4-4bd6-b6ef-1608ebef6b22 [ 1749.503127] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a34dd8e1-057a-4d51-b1b0-991c04e5127e tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.231s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.775406] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: b261c90f-642d-42b7-8b79-d87eeaf0537a] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1749.894216] env[63297]: DEBUG oslo_concurrency.lockutils [req-96b39b78-ce83-41cb-8965-b0daea55893b req-9faa1e31-d4bb-45a8-b748-70cf81e0ac3a service nova] Releasing lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1750.007305] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-649ae565-cd45-4329-82b3-d5de9accde18 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "f87867c3-58d4-4bd6-b6ef-1608ebef6b22" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.340s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.279456] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: b1ed5d76-d358-49d3-a854-8f968bc987ad] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1750.783728] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 765f3232-f3f9-4d9b-92f2-fb6603f2a90a] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1750.843079] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.843346] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.843550] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.843730] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.843899] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.846069] env[63297]: INFO nova.compute.manager [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 
427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Terminating instance [ 1750.847818] env[63297]: DEBUG nova.compute.manager [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1750.848077] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1750.848929] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b570caf4-c2ce-4ba5-9059-57b26e94d43c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.856618] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1750.856861] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-443f9578-c180-4f04-8fac-452fed875aad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.863372] env[63297]: DEBUG oslo_vmware.api [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1750.863372] env[63297]: value = "task-1698299" [ 1750.863372] env[63297]: _type = "Task" [ 1750.863372] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.871686] env[63297]: DEBUG oslo_vmware.api [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698299, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.286278] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.286632] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.287760] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: b95b7656-70ac-4eaf-9934-4b4c50e78035] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1751.374054] env[63297]: DEBUG oslo_vmware.api [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698299, 'name': PowerOffVM_Task, 'duration_secs': 0.236974} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.374054] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1751.374054] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1751.374320] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae01b25c-07d4-42f7-903d-d983d18576ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.539072] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1751.539072] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1751.539072] env[63297]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleting the datastore file [datastore1] 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1751.539072] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbe3bc04-93e7-4e13-8b3a-8030d2f75587 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.546199] env[63297]: DEBUG oslo_vmware.api [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1751.546199] env[63297]: value = "task-1698301" [ 1751.546199] env[63297]: _type = "Task" [ 1751.546199] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.553934] env[63297]: DEBUG oslo_vmware.api [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698301, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.790756] env[63297]: DEBUG nova.compute.manager [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1751.793161] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 5124f7fb-1293-4964-98c4-426ecfce7d10] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1752.055996] env[63297]: DEBUG oslo_vmware.api [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140082} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.056207] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1752.056392] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1752.056568] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1752.056740] env[63297]: INFO nova.compute.manager [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1752.056986] env[63297]: DEBUG oslo.service.loopingcall [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1752.057184] env[63297]: DEBUG nova.compute.manager [-] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1752.057277] env[63297]: DEBUG nova.network.neutron [-] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1752.298534] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 8fa5fef6-8768-4e24-aab3-db56a10588c2] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1752.320537] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.320826] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.322782] env[63297]: INFO nova.compute.claims [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1752.492781] env[63297]: DEBUG nova.compute.manager [req-019f841d-c58f-417d-bf6e-6cb11aee1d3e req-b79c9f80-0620-44ec-a405-b06974a12dbc service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Received event network-vif-deleted-bb862c99-f006-416a-9b98-0fb287a5d194 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1752.493085] env[63297]: INFO nova.compute.manager [req-019f841d-c58f-417d-bf6e-6cb11aee1d3e req-b79c9f80-0620-44ec-a405-b06974a12dbc service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Neutron deleted interface bb862c99-f006-416a-9b98-0fb287a5d194; detaching it from the instance and deleting it from the info cache [ 1752.493272] env[63297]: DEBUG nova.network.neutron [req-019f841d-c58f-417d-bf6e-6cb11aee1d3e req-b79c9f80-0620-44ec-a405-b06974a12dbc service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.804351] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 1ca9c4f6-3202-4819-8d96-bd7ee4e0f23f] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1752.970403] env[63297]: DEBUG nova.network.neutron [-] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1752.996657] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-238d6f8e-303b-49f6-afac-266edb501c40 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.006612] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee77f4f-4fbe-44f7-a148-3c8f479240ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.036479] env[63297]: DEBUG nova.compute.manager [req-019f841d-c58f-417d-bf6e-6cb11aee1d3e req-b79c9f80-0620-44ec-a405-b06974a12dbc service nova] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Detach interface failed, port_id=bb862c99-f006-416a-9b98-0fb287a5d194, reason: Instance 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1753.256960] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Volume attach. Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1753.257264] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354020', 'volume_id': '0fa97388-d5df-41d2-befe-de9bd7aac345', 'name': 'volume-0fa97388-d5df-41d2-befe-de9bd7aac345', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6a99c537-e882-4c8c-b7c3-0861a5c0dc0d', 'attached_at': '', 'detached_at': '', 'volume_id': '0fa97388-d5df-41d2-befe-de9bd7aac345', 'serial': '0fa97388-d5df-41d2-befe-de9bd7aac345'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1753.258164] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab46923-c62d-409d-9620-ae9b143ec273 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.276092] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836c78f5-b575-4d58-a398-207407983ba6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.300954] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-0fa97388-d5df-41d2-befe-de9bd7aac345/volume-0fa97388-d5df-41d2-befe-de9bd7aac345.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1753.301337] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34353402-c0d6-4573-b583-db7d4d55529a {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.315030] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: fba9040d-f904-44a1-8785-14d4696ea939] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1753.322423] env[63297]: DEBUG oslo_vmware.api [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1753.322423] env[63297]: value = "task-1698302" [ 1753.322423] env[63297]: _type = "Task" [ 1753.322423] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.333576] env[63297]: DEBUG oslo_vmware.api [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698302, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.433507] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b2bd4b-fec7-43d4-b04f-070d1a307762 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.442262] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79b3649-97d6-4268-8a13-afb6c4b85210 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.472871] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c954ab-7b80-4d9e-ac1e-89514a6f9266 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.475449] env[63297]: INFO nova.compute.manager [-] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Took 1.42 seconds to deallocate network for instance. [ 1753.484044] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8e6c57-b2c5-4f23-8566-4adccb1a0c14 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.499330] env[63297]: DEBUG nova.compute.provider_tree [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1753.818156] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: b853b581-ea46-4455-8cdb-6ea2f31c22be] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1753.831993] env[63297]: DEBUG oslo_vmware.api [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698302, 'name': ReconfigVM_Task, 'duration_secs': 0.349182} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.832423] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-0fa97388-d5df-41d2-befe-de9bd7aac345/volume-0fa97388-d5df-41d2-befe-de9bd7aac345.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1753.837847] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03651fa6-3a7a-4d05-ac69-c71180013ab6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.853932] env[63297]: DEBUG oslo_vmware.api [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1753.853932] env[63297]: value = "task-1698303" [ 1753.853932] env[63297]: _type = "Task" [ 1753.853932] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.862479] env[63297]: DEBUG oslo_vmware.api [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698303, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.981550] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.003808] env[63297]: DEBUG nova.scheduler.client.report [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1754.322213] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 14b4518e-044a-451a-845d-fa3742e5b3e2] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1754.364664] env[63297]: DEBUG oslo_vmware.api [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698303, 'name': ReconfigVM_Task, 'duration_secs': 0.15204} completed successfully. 
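The ReconfigVM_Task records above attach the Cinder volume by reconfiguring the VM with an added virtual disk and then waiting on the task. A schematic sketch of that flow, using plain dicts and injected reconfigure_vm / wait_for_task callables rather than the actual vSphere client API:

def build_disk_attach_spec(vmdk_path, disk_type="thin"):
    """Describe the virtual disk to add, as the reconfigure payload."""
    return {
        "deviceChange": [{
            "operation": "add",
            "device": {
                "backing": {
                    "fileName": vmdk_path,          # e.g. the volume-....vmdk path
                    "diskMode": "persistent",
                    "thinProvisioned": disk_type == "thin",
                },
            },
        }],
    }


def attach_volume(vm_ref, vmdk_path, reconfigure_vm, wait_for_task):
    """Attach a volume-backed vmdk by reconfiguring the VM, then wait for the task."""
    spec = build_disk_attach_spec(vmdk_path)
    task = reconfigure_vm(vm_ref, spec)   # corresponds to ReconfigVM_Task above
    wait_for_task(task)                   # corresponds to the progress-polling lines
    return spec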
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.366054] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354020', 'volume_id': '0fa97388-d5df-41d2-befe-de9bd7aac345', 'name': 'volume-0fa97388-d5df-41d2-befe-de9bd7aac345', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6a99c537-e882-4c8c-b7c3-0861a5c0dc0d', 'attached_at': '', 'detached_at': '', 'volume_id': '0fa97388-d5df-41d2-befe-de9bd7aac345', 'serial': '0fa97388-d5df-41d2-befe-de9bd7aac345'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1754.508740] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.188s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.509260] env[63297]: DEBUG nova.compute.manager [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1754.511867] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.531s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.512149] env[63297]: DEBUG nova.objects.instance [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lazy-loading 'resources' on Instance uuid 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1754.827723] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: b14e8466-68ab-4705-a439-6db961a149b0] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1755.016237] env[63297]: DEBUG nova.compute.utils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1755.019880] env[63297]: DEBUG nova.compute.manager [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Allocating IP information in the background. 
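"Allocating IP information in the background" above means port allocation runs asynchronously while the manager continues with block device mappings. A minimal sketch of that overlap using a thread pool, with allocate_ports and build_block_device_mappings as hypothetical stand-ins:

from concurrent.futures import ThreadPoolExecutor


def allocate_ports(instance_id):
    """Hypothetical stand-in for Neutron port creation for the instance."""
    return [{"instance": instance_id, "port": "example-port"}]


def build_block_device_mappings(instance_id):
    """Hypothetical stand-in for assembling the instance's BDM list."""
    return [{"instance": instance_id, "device": "/dev/sda"}]


def build_instance(instance_id):
    with ThreadPoolExecutor(max_workers=1) as pool:
        network_future = pool.submit(allocate_ports, instance_id)  # background
        bdms = build_block_device_mappings(instance_id)            # foreground work
        network_info = network_future.result()  # block only when the result is needed
    return bdms, network_info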
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1755.020069] env[63297]: DEBUG nova.network.neutron [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1755.069436] env[63297]: DEBUG nova.policy [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2d8413d4aad4ed8a1fa9e436de117ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc81b0f87c64b2283eb0ece21fb31a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1755.114749] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96704544-c74d-418a-bb55-601e2f0ccd6c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.122733] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655abe6d-8cd2-4b28-8ca5-678a26d34f1b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.154196] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c392f16-220a-4a56-9752-20c425c9e8f6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.164102] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb68d44-20ca-4805-b631-e08ae84d4b00 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.178471] env[63297]: DEBUG nova.compute.provider_tree [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1755.318690] env[63297]: DEBUG nova.network.neutron [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Successfully created port: a995a9af-3d84-43dd-8695-17446ea38fc8 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1755.331501] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 9b1306f9-4b0a-4116-8e79-271478f33490] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1755.402709] env[63297]: DEBUG nova.objects.instance [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 
tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'flavor' on Instance uuid 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1755.520721] env[63297]: DEBUG nova.compute.manager [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1755.682211] env[63297]: DEBUG nova.scheduler.client.report [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1755.835135] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1755.835335] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Cleaning up deleted instances with incomplete migration {{(pid=63297) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1755.908914] env[63297]: DEBUG oslo_concurrency.lockutils [None req-775a7865-96f1-4706-ae92-9c71fc911985 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.747s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.188026] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.675s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.206017] env[63297]: INFO nova.scheduler.client.report [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleted allocations for instance 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1 [ 1756.338545] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1756.530361] env[63297]: DEBUG nova.compute.manager [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 
tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1756.557858] env[63297]: DEBUG nova.virt.hardware [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1756.558129] env[63297]: DEBUG nova.virt.hardware [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1756.558294] env[63297]: DEBUG nova.virt.hardware [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1756.558471] env[63297]: DEBUG nova.virt.hardware [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1756.558615] env[63297]: DEBUG nova.virt.hardware [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1756.558753] env[63297]: DEBUG nova.virt.hardware [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1756.558956] env[63297]: DEBUG nova.virt.hardware [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1756.559129] env[63297]: DEBUG nova.virt.hardware [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1756.559298] env[63297]: DEBUG nova.virt.hardware [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1756.559457] env[63297]: DEBUG nova.virt.hardware [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1756.559626] env[63297]: DEBUG nova.virt.hardware [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1756.560493] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d01ebb-8986-4027-b591-9a0062073f42 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.568590] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b3e0f9-6fea-49de-8625-0f1e13ef6b21 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.688074] env[63297]: DEBUG nova.compute.manager [req-3a2d54cf-3bf7-4d92-bb81-11c3ec9b32d8 req-a04a5b4d-5d66-47e8-8d50-e656e93dae04 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Received event network-vif-plugged-a995a9af-3d84-43dd-8695-17446ea38fc8 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1756.688354] env[63297]: DEBUG oslo_concurrency.lockutils [req-3a2d54cf-3bf7-4d92-bb81-11c3ec9b32d8 req-a04a5b4d-5d66-47e8-8d50-e656e93dae04 service nova] Acquiring lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.688576] env[63297]: DEBUG oslo_concurrency.lockutils [req-3a2d54cf-3bf7-4d92-bb81-11c3ec9b32d8 req-a04a5b4d-5d66-47e8-8d50-e656e93dae04 service nova] Lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.688745] env[63297]: DEBUG oslo_concurrency.lockutils [req-3a2d54cf-3bf7-4d92-bb81-11c3ec9b32d8 req-a04a5b4d-5d66-47e8-8d50-e656e93dae04 service nova] Lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.688909] env[63297]: DEBUG nova.compute.manager [req-3a2d54cf-3bf7-4d92-bb81-11c3ec9b32d8 req-a04a5b4d-5d66-47e8-8d50-e656e93dae04 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] No waiting events found dispatching 
network-vif-plugged-a995a9af-3d84-43dd-8695-17446ea38fc8 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1756.689114] env[63297]: WARNING nova.compute.manager [req-3a2d54cf-3bf7-4d92-bb81-11c3ec9b32d8 req-a04a5b4d-5d66-47e8-8d50-e656e93dae04 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Received unexpected event network-vif-plugged-a995a9af-3d84-43dd-8695-17446ea38fc8 for instance with vm_state building and task_state spawning. [ 1756.712739] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d5ef032-94e5-4de4-acd4-0b41c27b6107 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "427c4ff0-1bf1-4bfb-b5c6-de6659148ab1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.869s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.773644] env[63297]: DEBUG nova.network.neutron [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Successfully updated port: a995a9af-3d84-43dd-8695-17446ea38fc8 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1757.259180] env[63297]: DEBUG nova.compute.manager [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Stashing vm_state: active {{(pid=63297) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1757.277698] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "refresh_cache-c257ee27-8d87-4fe6-a953-cc4af1ec36d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.277698] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "refresh_cache-c257ee27-8d87-4fe6-a953-cc4af1ec36d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.277698] env[63297]: DEBUG nova.network.neutron [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1757.777426] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.777690] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: 
waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.815275] env[63297]: DEBUG nova.network.neutron [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1757.963849] env[63297]: DEBUG nova.network.neutron [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Updating instance_info_cache with network_info: [{"id": "a995a9af-3d84-43dd-8695-17446ea38fc8", "address": "fa:16:3e:8b:0b:e5", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa995a9af-3d", "ovs_interfaceid": "a995a9af-3d84-43dd-8695-17446ea38fc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.285074] env[63297]: INFO nova.compute.claims [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1758.466577] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "refresh_cache-c257ee27-8d87-4fe6-a953-cc4af1ec36d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1758.466847] env[63297]: DEBUG nova.compute.manager [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Instance network_info: |[{"id": "a995a9af-3d84-43dd-8695-17446ea38fc8", "address": "fa:16:3e:8b:0b:e5", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa995a9af-3d", "ovs_interfaceid": "a995a9af-3d84-43dd-8695-17446ea38fc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1758.467283] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:0b:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fc48e29b-113c-4849-850c-35435eab4052', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a995a9af-3d84-43dd-8695-17446ea38fc8', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1758.474804] env[63297]: DEBUG oslo.service.loopingcall [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1758.475042] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1758.475683] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6772945e-6165-4436-80db-04d303ed79ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.495064] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1758.495064] env[63297]: value = "task-1698304" [ 1758.495064] env[63297]: _type = "Task" [ 1758.495064] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.503056] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698304, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.713995] env[63297]: DEBUG nova.compute.manager [req-48dfbb16-1381-4f69-af3d-5dfb86a2ce69 req-4ed48c41-ffa9-40ff-8363-33fba807d0c5 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Received event network-changed-a995a9af-3d84-43dd-8695-17446ea38fc8 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1758.714208] env[63297]: DEBUG nova.compute.manager [req-48dfbb16-1381-4f69-af3d-5dfb86a2ce69 req-4ed48c41-ffa9-40ff-8363-33fba807d0c5 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Refreshing instance network info cache due to event network-changed-a995a9af-3d84-43dd-8695-17446ea38fc8. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1758.714441] env[63297]: DEBUG oslo_concurrency.lockutils [req-48dfbb16-1381-4f69-af3d-5dfb86a2ce69 req-4ed48c41-ffa9-40ff-8363-33fba807d0c5 service nova] Acquiring lock "refresh_cache-c257ee27-8d87-4fe6-a953-cc4af1ec36d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1758.714600] env[63297]: DEBUG oslo_concurrency.lockutils [req-48dfbb16-1381-4f69-af3d-5dfb86a2ce69 req-4ed48c41-ffa9-40ff-8363-33fba807d0c5 service nova] Acquired lock "refresh_cache-c257ee27-8d87-4fe6-a953-cc4af1ec36d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1758.714772] env[63297]: DEBUG nova.network.neutron [req-48dfbb16-1381-4f69-af3d-5dfb86a2ce69 req-4ed48c41-ffa9-40ff-8363-33fba807d0c5 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Refreshing network info cache for port a995a9af-3d84-43dd-8695-17446ea38fc8 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1758.791769] env[63297]: INFO nova.compute.resource_tracker [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating resource usage from migration 1da2ee28-cb5f-4925-825a-0db5daf4f8d3 [ 1758.856240] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.856476] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.911313] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f65c6c2-625e-4b37-9c63-ca5f26b7e711 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.919224] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59247e3b-6a85-4101-87fb-a5ca92ee6bcc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.948714] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150bf99b-a196-42a7-a854-924a122fa28f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.955712] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff5c5b2-4f85-4d29-9f5c-bef27f3e7f22 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.968333] env[63297]: DEBUG nova.compute.provider_tree 
[None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1759.003647] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698304, 'name': CreateVM_Task, 'duration_secs': 0.341186} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.003815] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1759.004472] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.004637] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.004962] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1759.005273] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4572e642-fe4e-40d0-bb63-44f99a8270e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.009440] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1759.009440] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527db1c3-556d-c772-9810-33ca125660a4" [ 1759.009440] env[63297]: _type = "Task" [ 1759.009440] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.017855] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527db1c3-556d-c772-9810-33ca125660a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.359171] env[63297]: DEBUG nova.compute.manager [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1759.426884] env[63297]: DEBUG nova.network.neutron [req-48dfbb16-1381-4f69-af3d-5dfb86a2ce69 req-4ed48c41-ffa9-40ff-8363-33fba807d0c5 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Updated VIF entry in instance network info cache for port a995a9af-3d84-43dd-8695-17446ea38fc8. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1759.427257] env[63297]: DEBUG nova.network.neutron [req-48dfbb16-1381-4f69-af3d-5dfb86a2ce69 req-4ed48c41-ffa9-40ff-8363-33fba807d0c5 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Updating instance_info_cache with network_info: [{"id": "a995a9af-3d84-43dd-8695-17446ea38fc8", "address": "fa:16:3e:8b:0b:e5", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa995a9af-3d", "ovs_interfaceid": "a995a9af-3d84-43dd-8695-17446ea38fc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1759.470943] env[63297]: DEBUG nova.scheduler.client.report [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1759.519577] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527db1c3-556d-c772-9810-33ca125660a4, 'name': SearchDatastore_Task, 'duration_secs': 0.009663} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.519893] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.520192] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1759.520456] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.520608] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.520787] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1759.521063] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9cef68ce-1cc8-4c98-9bff-7ff0a90198fd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.529649] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1759.529823] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1759.530536] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afec8398-060e-4e18-8c84-22243ee4ab80 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.535292] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1759.535292] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ef3066-a677-9fff-62ff-6f40649250be" [ 1759.535292] env[63297]: _type = "Task" [ 1759.535292] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.542952] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ef3066-a677-9fff-62ff-6f40649250be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.885330] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.929371] env[63297]: DEBUG oslo_concurrency.lockutils [req-48dfbb16-1381-4f69-af3d-5dfb86a2ce69 req-4ed48c41-ffa9-40ff-8363-33fba807d0c5 service nova] Releasing lock "refresh_cache-c257ee27-8d87-4fe6-a953-cc4af1ec36d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.976660] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.199s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.976861] env[63297]: INFO nova.compute.manager [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Migrating [ 1759.983297] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.098s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.985859] env[63297]: INFO nova.compute.claims [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 
1760.049709] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ef3066-a677-9fff-62ff-6f40649250be, 'name': SearchDatastore_Task, 'duration_secs': 0.008536} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.050961] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18938065-50d9-47f5-a89d-fb42ab5d6dcc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.057810] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1760.057810] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e31b82-aa54-d190-4e9b-7ed06ed932c9" [ 1760.057810] env[63297]: _type = "Task" [ 1760.057810] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.066144] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e31b82-aa54-d190-4e9b-7ed06ed932c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.498597] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.498783] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.498950] env[63297]: DEBUG nova.network.neutron [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1760.568295] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e31b82-aa54-d190-4e9b-7ed06ed932c9, 'name': SearchDatastore_Task, 'duration_secs': 0.009406} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.568569] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.568822] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c257ee27-8d87-4fe6-a953-cc4af1ec36d6/c257ee27-8d87-4fe6-a953-cc4af1ec36d6.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1760.569090] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ccd3610c-a87f-404e-8ce4-fc7134a2e85c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.576181] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1760.576181] env[63297]: value = "task-1698305" [ 1760.576181] env[63297]: _type = "Task" [ 1760.576181] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.586706] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698305, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.983778] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "interface-c1696ee9-cb48-414c-b0a0-b6fa2e880a81-aa5d34db-425d-4449-81ca-fd7712125808" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.984196] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-c1696ee9-cb48-414c-b0a0-b6fa2e880a81-aa5d34db-425d-4449-81ca-fd7712125808" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.984395] env[63297]: DEBUG nova.objects.instance [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'flavor' on Instance uuid c1696ee9-cb48-414c-b0a0-b6fa2e880a81 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1761.087138] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698305, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.400162} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.087403] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c257ee27-8d87-4fe6-a953-cc4af1ec36d6/c257ee27-8d87-4fe6-a953-cc4af1ec36d6.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1761.087637] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1761.087881] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6547ab5d-26a7-4fd0-9cf4-12ed7faa454e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.097334] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1761.097334] env[63297]: value = "task-1698306" [ 1761.097334] env[63297]: _type = "Task" [ 1761.097334] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.107354] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698306, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.119208] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f71690-95c8-41ff-81c4-3002ba313dcf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.128509] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c19933-ea61-41b3-bcf1-5e9bd8be58a2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.161430] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12155854-c9ef-4e83-a235-1907853e9166 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.168692] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69af5cb0-aad5-4995-b783-c52676548066 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.181923] env[63297]: DEBUG nova.compute.provider_tree [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1761.228733] env[63297]: DEBUG nova.network.neutron [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance_info_cache with network_info: [{"id": "c31a25f5-7d02-427f-932a-464daf59e755", "address": "fa:16:3e:a0:49:93", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc31a25f5-7d", "ovs_interfaceid": "c31a25f5-7d02-427f-932a-464daf59e755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1761.392274] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.392544] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.578576] env[63297]: DEBUG nova.objects.instance [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'pci_requests' on Instance uuid c1696ee9-cb48-414c-b0a0-b6fa2e880a81 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1761.607248] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698306, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087546} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.607511] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1761.608274] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3d1d9a-acbd-4379-8eb7-be556edf87e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.630172] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] c257ee27-8d87-4fe6-a953-cc4af1ec36d6/c257ee27-8d87-4fe6-a953-cc4af1ec36d6.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1761.630411] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1359f420-7716-4295-9cd6-139accc5c97e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.648910] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1761.648910] env[63297]: value = "task-1698307" [ 1761.648910] env[63297]: _type = "Task" [ 1761.648910] env[63297]: } 
to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.656405] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698307, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.684653] env[63297]: DEBUG nova.scheduler.client.report [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1761.731402] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.895382] env[63297]: DEBUG nova.compute.utils [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1762.080746] env[63297]: DEBUG nova.objects.base [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1762.081118] env[63297]: DEBUG nova.network.neutron [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1762.147780] env[63297]: DEBUG nova.policy [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c21dc2049dd84f87a3b7cbcd7ba0ebcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48bfb708de5c4dd287530be2f8483ca9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1762.158540] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 
tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698307, 'name': ReconfigVM_Task, 'duration_secs': 0.277165} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.158790] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Reconfigured VM instance instance-00000070 to attach disk [datastore1] c257ee27-8d87-4fe6-a953-cc4af1ec36d6/c257ee27-8d87-4fe6-a953-cc4af1ec36d6.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1762.159458] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5993a0ff-658f-4498-bb45-031d61abf823 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.165784] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1762.165784] env[63297]: value = "task-1698308" [ 1762.165784] env[63297]: _type = "Task" [ 1762.165784] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.172936] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698308, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.190144] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.190621] env[63297]: DEBUG nova.compute.manager [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1762.398644] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.675537] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698308, 'name': Rename_Task, 'duration_secs': 0.138983} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.675815] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1762.676073] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ccbd3c3d-ab8e-479e-b1c9-d590df5d8421 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.682775] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1762.682775] env[63297]: value = "task-1698309" [ 1762.682775] env[63297]: _type = "Task" [ 1762.682775] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.691247] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698309, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.695518] env[63297]: DEBUG nova.compute.utils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1762.696889] env[63297]: DEBUG nova.compute.manager [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1762.696999] env[63297]: DEBUG nova.network.neutron [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1762.736798] env[63297]: DEBUG nova.policy [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1012cd7cb62c4ef593edecd3a38f4acb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01fe9157b11244cb86a7626caae0616d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1763.039884] env[63297]: DEBUG nova.network.neutron [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Successfully created port: 8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1763.194182] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698309, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.202105] env[63297]: DEBUG nova.compute.manager [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1763.248975] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dce96f2-fb0c-490e-9f44-215a3d107d42 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.274558] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance '6a99c537-e882-4c8c-b7c3-0861a5c0dc0d' progress to 0 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1763.470059] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.470197] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.470343] env[63297]: INFO nova.compute.manager [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Attaching volume 2b804b7b-6e07-4705-9d8b-8906c4ad1af1 to /dev/sdb [ 1763.500759] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefb0938-75ac-4091-9d22-45eb5bdc3a9a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.508383] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc9d9c1-7f97-4810-a466-117d839f1410 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.523553] env[63297]: DEBUG nova.virt.block_device [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Updating existing volume attachment record: 4a41eb56-1fb4-4dfb-bda6-846c50ed9a51 {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1763.537172] env[63297]: DEBUG nova.compute.manager [req-4b4ade00-02dd-4d74-be80-61449cfba7b0 req-454faeec-375a-4662-af89-8a69c49389d1 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Received event network-vif-plugged-aa5d34db-425d-4449-81ca-fd7712125808 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1763.537277] env[63297]: DEBUG oslo_concurrency.lockutils [req-4b4ade00-02dd-4d74-be80-61449cfba7b0 req-454faeec-375a-4662-af89-8a69c49389d1 service nova] Acquiring lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.537505] env[63297]: DEBUG oslo_concurrency.lockutils [req-4b4ade00-02dd-4d74-be80-61449cfba7b0 req-454faeec-375a-4662-af89-8a69c49389d1 service nova] Lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.537637] env[63297]: DEBUG oslo_concurrency.lockutils [req-4b4ade00-02dd-4d74-be80-61449cfba7b0 req-454faeec-375a-4662-af89-8a69c49389d1 service nova] Lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.537798] env[63297]: DEBUG nova.compute.manager [req-4b4ade00-02dd-4d74-be80-61449cfba7b0 req-454faeec-375a-4662-af89-8a69c49389d1 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] No waiting events found dispatching network-vif-plugged-aa5d34db-425d-4449-81ca-fd7712125808 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1763.537967] env[63297]: WARNING nova.compute.manager [req-4b4ade00-02dd-4d74-be80-61449cfba7b0 req-454faeec-375a-4662-af89-8a69c49389d1 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Received unexpected event network-vif-plugged-aa5d34db-425d-4449-81ca-fd7712125808 for instance with vm_state active and task_state None. [ 1763.620055] env[63297]: DEBUG nova.network.neutron [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Successfully updated port: aa5d34db-425d-4449-81ca-fd7712125808 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1763.694083] env[63297]: DEBUG oslo_vmware.api [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698309, 'name': PowerOnVM_Task, 'duration_secs': 0.627525} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.694399] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1763.694612] env[63297]: INFO nova.compute.manager [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Took 7.16 seconds to spawn the instance on the hypervisor. 
[ 1763.694788] env[63297]: DEBUG nova.compute.manager [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1763.695604] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96dc9240-dcad-4349-93c5-c542a410a569 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.781543] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1763.781969] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2fa5565e-0a16-49d2-8e7a-2329609e3222 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.789709] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1763.789709] env[63297]: value = "task-1698311" [ 1763.789709] env[63297]: _type = "Task" [ 1763.789709] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.798896] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698311, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.122899] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.122991] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.123351] env[63297]: DEBUG nova.network.neutron [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1764.213586] env[63297]: DEBUG nova.compute.manager [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1764.228032] env[63297]: INFO nova.compute.manager [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Took 11.93 seconds to build instance. [ 1764.260671] env[63297]: DEBUG nova.virt.hardware [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1764.260910] env[63297]: DEBUG nova.virt.hardware [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1764.261079] env[63297]: DEBUG nova.virt.hardware [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1764.261266] env[63297]: DEBUG nova.virt.hardware [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1764.261425] env[63297]: DEBUG nova.virt.hardware [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1764.261575] env[63297]: DEBUG nova.virt.hardware [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1764.261780] env[63297]: DEBUG nova.virt.hardware [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1764.261941] env[63297]: 
DEBUG nova.virt.hardware [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1764.262104] env[63297]: DEBUG nova.virt.hardware [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1764.262265] env[63297]: DEBUG nova.virt.hardware [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1764.262675] env[63297]: DEBUG nova.virt.hardware [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1764.263486] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d370db4-9554-4095-be4c-a62dab5dfce9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.275481] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca356b41-3e5f-4a3b-bb01-8b9f75bf21ce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.298164] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698311, 'name': PowerOffVM_Task, 'duration_secs': 0.274064} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.298426] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1764.298602] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance '6a99c537-e882-4c8c-b7c3-0861a5c0dc0d' progress to 17 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1764.657691] env[63297]: WARNING nova.network.neutron [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] 250ab20f-3057-41ed-bb65-926464a4e926 already exists in list: networks containing: ['250ab20f-3057-41ed-bb65-926464a4e926']. 
ignoring it [ 1764.729971] env[63297]: DEBUG oslo_concurrency.lockutils [None req-15c68206-f825-44c6-8329-5a3ab4bedb5d tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.443s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.807308] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1764.807639] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1764.807829] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1764.808085] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1764.808357] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1764.808530] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1764.808811] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1764.808998] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 
tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1764.809239] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1764.809442] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1764.809805] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1764.816498] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63f99feb-22d7-4d89-b1f7-6b9413e9d5bc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.836622] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1764.836622] env[63297]: value = "task-1698312" [ 1764.836622] env[63297]: _type = "Task" [ 1764.836622] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.846813] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698312, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.856940] env[63297]: DEBUG nova.network.neutron [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Successfully updated port: 8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1764.935078] env[63297]: DEBUG nova.compute.manager [req-2ce38772-2553-454c-bb9b-49b5e5c7d9ce req-7650eaf8-0ac1-48a9-862c-a750329a6518 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Received event network-vif-plugged-8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1764.935351] env[63297]: DEBUG oslo_concurrency.lockutils [req-2ce38772-2553-454c-bb9b-49b5e5c7d9ce req-7650eaf8-0ac1-48a9-862c-a750329a6518 service nova] Acquiring lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.935569] env[63297]: DEBUG oslo_concurrency.lockutils [req-2ce38772-2553-454c-bb9b-49b5e5c7d9ce req-7650eaf8-0ac1-48a9-862c-a750329a6518 service nova] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.935734] env[63297]: DEBUG oslo_concurrency.lockutils [req-2ce38772-2553-454c-bb9b-49b5e5c7d9ce req-7650eaf8-0ac1-48a9-862c-a750329a6518 service nova] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.935898] env[63297]: DEBUG nova.compute.manager [req-2ce38772-2553-454c-bb9b-49b5e5c7d9ce req-7650eaf8-0ac1-48a9-862c-a750329a6518 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] No waiting events found dispatching network-vif-plugged-8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1764.936074] env[63297]: WARNING nova.compute.manager [req-2ce38772-2553-454c-bb9b-49b5e5c7d9ce req-7650eaf8-0ac1-48a9-862c-a750329a6518 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Received unexpected event network-vif-plugged-8b49c0c7-27b3-41da-b832-28195da8e8d1 for instance with vm_state building and task_state spawning. 
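Note: the paired "Acquiring lock ... by ...", "acquired ... :: waited N.NNNs", and '"released" ... :: held N.NNNs' entries above come from oslo.concurrency's lockutils wrappers guarding short critical sections such as popping instance events. A minimal usage sketch follows; the lock name and the guarded function are hypothetical, and the timing/log text is produced by lockutils itself rather than by caller code.

    # Minimal sketch of the lockutils pattern that produces the
    # acquire/waited/released/held entries in the log above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized("demo-instance-events")
    def pop_event_example(events, key):
        # Critical section: only one thread mutates the event map at a time.
        return events.pop(key, None)

    # Equivalent context-manager form:
    def pop_event_ctx(events, key):
        with lockutils.lock("demo-instance-events"):
            return events.pop(key, None)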
[ 1764.972225] env[63297]: DEBUG nova.network.neutron [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updating instance_info_cache with network_info: [{"id": "da1879c7-6003-4193-ab1c-019184bded8d", "address": "fa:16:3e:c8:fa:05", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda1879c7-60", "ovs_interfaceid": "da1879c7-6003-4193-ab1c-019184bded8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa5d34db-425d-4449-81ca-fd7712125808", "address": "fa:16:3e:7b:37:2f", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa5d34db-42", "ovs_interfaceid": "aa5d34db-425d-4449-81ca-fd7712125808", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1765.347906] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698312, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.360029] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.360289] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.360449] env[63297]: DEBUG nova.network.neutron [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1765.475322] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.476099] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.476339] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.477601] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbe7c57-db20-4e6f-b68e-d8ebeffedd3e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.496562] env[63297]: DEBUG nova.virt.hardware [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1765.497662] env[63297]: DEBUG 
nova.virt.hardware [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1765.497943] env[63297]: DEBUG nova.virt.hardware [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1765.498175] env[63297]: DEBUG nova.virt.hardware [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1765.498334] env[63297]: DEBUG nova.virt.hardware [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1765.498491] env[63297]: DEBUG nova.virt.hardware [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1765.498691] env[63297]: DEBUG nova.virt.hardware [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1765.498846] env[63297]: DEBUG nova.virt.hardware [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1765.499018] env[63297]: DEBUG nova.virt.hardware [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1765.499182] env[63297]: DEBUG nova.virt.hardware [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1765.499388] env[63297]: DEBUG nova.virt.hardware [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1765.506043] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 
tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Reconfiguring VM to attach interface {{(pid=63297) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1765.506724] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a7c340d-14dc-4bcc-8745-37520f2c00e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.525488] env[63297]: DEBUG oslo_vmware.api [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1765.525488] env[63297]: value = "task-1698313" [ 1765.525488] env[63297]: _type = "Task" [ 1765.525488] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.533961] env[63297]: DEBUG oslo_vmware.api [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698313, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.561613] env[63297]: DEBUG nova.compute.manager [req-bc72195c-86c6-4b1b-b849-94b66c77c565 req-d5aa5c3e-09e1-4999-8a70-6ca8eb3f1792 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Received event network-changed-aa5d34db-425d-4449-81ca-fd7712125808 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1765.561796] env[63297]: DEBUG nova.compute.manager [req-bc72195c-86c6-4b1b-b849-94b66c77c565 req-d5aa5c3e-09e1-4999-8a70-6ca8eb3f1792 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Refreshing instance network info cache due to event network-changed-aa5d34db-425d-4449-81ca-fd7712125808. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1765.562012] env[63297]: DEBUG oslo_concurrency.lockutils [req-bc72195c-86c6-4b1b-b849-94b66c77c565 req-d5aa5c3e-09e1-4999-8a70-6ca8eb3f1792 service nova] Acquiring lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.562162] env[63297]: DEBUG oslo_concurrency.lockutils [req-bc72195c-86c6-4b1b-b849-94b66c77c565 req-d5aa5c3e-09e1-4999-8a70-6ca8eb3f1792 service nova] Acquired lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.562322] env[63297]: DEBUG nova.network.neutron [req-bc72195c-86c6-4b1b-b849-94b66c77c565 req-d5aa5c3e-09e1-4999-8a70-6ca8eb3f1792 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Refreshing network info cache for port aa5d34db-425d-4449-81ca-fd7712125808 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1765.846925] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698312, 'name': ReconfigVM_Task, 'duration_secs': 0.528269} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.847267] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance '6a99c537-e882-4c8c-b7c3-0861a5c0dc0d' progress to 33 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1765.904972] env[63297]: DEBUG nova.network.neutron [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1766.037697] env[63297]: DEBUG oslo_vmware.api [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698313, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.067574] env[63297]: DEBUG nova.network.neutron [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updating instance_info_cache with network_info: [{"id": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "address": "fa:16:3e:78:77:0e", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b49c0c7-27", "ovs_interfaceid": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.284597] env[63297]: DEBUG nova.network.neutron [req-bc72195c-86c6-4b1b-b849-94b66c77c565 req-d5aa5c3e-09e1-4999-8a70-6ca8eb3f1792 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updated VIF entry in instance network info cache for port aa5d34db-425d-4449-81ca-fd7712125808. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1766.285090] env[63297]: DEBUG nova.network.neutron [req-bc72195c-86c6-4b1b-b849-94b66c77c565 req-d5aa5c3e-09e1-4999-8a70-6ca8eb3f1792 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updating instance_info_cache with network_info: [{"id": "da1879c7-6003-4193-ab1c-019184bded8d", "address": "fa:16:3e:c8:fa:05", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda1879c7-60", "ovs_interfaceid": "da1879c7-6003-4193-ab1c-019184bded8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa5d34db-425d-4449-81ca-fd7712125808", "address": "fa:16:3e:7b:37:2f", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa5d34db-42", "ovs_interfaceid": "aa5d34db-425d-4449-81ca-fd7712125808", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.354974] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1766.355491] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1766.355564] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1766.355800] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1766.355965] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1766.356165] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1766.356396] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1766.356611] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1766.356819] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1766.356992] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1766.357238] env[63297]: DEBUG nova.virt.hardware [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 
tempest-ServerActionsTestOtherB-717361991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1766.363551] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1766.364211] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a5c47ee-e07b-42d2-9662-b4ed61dbb05e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.385478] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_power_states {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1766.393830] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1766.393830] env[63297]: value = "task-1698316" [ 1766.393830] env[63297]: _type = "Task" [ 1766.393830] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.405953] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698316, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.537368] env[63297]: DEBUG oslo_vmware.api [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698313, 'name': ReconfigVM_Task, 'duration_secs': 0.974712} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.538321] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.538550] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Reconfigured VM to attach interface {{(pid=63297) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1766.569961] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.570243] env[63297]: DEBUG nova.compute.manager [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Instance network_info: |[{"id": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "address": "fa:16:3e:78:77:0e", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b49c0c7-27", "ovs_interfaceid": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1766.570647] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:77:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b49c0c7-27b3-41da-b832-28195da8e8d1', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1766.578777] env[63297]: DEBUG oslo.service.loopingcall [None 
req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1766.579215] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1766.579473] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0392753-fb22-4c26-892c-57a3b7786fff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.600153] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1766.600153] env[63297]: value = "task-1698317" [ 1766.600153] env[63297]: _type = "Task" [ 1766.600153] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.609716] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698317, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.787784] env[63297]: DEBUG oslo_concurrency.lockutils [req-bc72195c-86c6-4b1b-b849-94b66c77c565 req-d5aa5c3e-09e1-4999-8a70-6ca8eb3f1792 service nova] Releasing lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.891068] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Getting list of instances from cluster (obj){ [ 1766.891068] env[63297]: value = "domain-c8" [ 1766.891068] env[63297]: _type = "ClusterComputeResource" [ 1766.891068] env[63297]: } {{(pid=63297) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1766.892262] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2147a25-8bb8-4f0a-9e06-157ef781986a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.903796] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698316, 'name': ReconfigVM_Task, 'duration_secs': 0.216343} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.904054] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1766.904763] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa35a90d-349f-42cb-8cf7-932f77b8ffa9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.917458] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Got total of 7 instances {{(pid=63297) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1766.917654] env[63297]: WARNING nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] While synchronizing instance power states, found 8 instances in the database and 7 instances on the hypervisor. [ 1766.917839] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Triggering sync for uuid fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69 {{(pid=63297) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1766.918047] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Triggering sync for uuid 10def566-2d1f-4ea2-9df5-ebf4d77f7b48 {{(pid=63297) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1766.918226] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Triggering sync for uuid 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d {{(pid=63297) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1766.918397] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Triggering sync for uuid 5c0eefd2-69d4-4100-93b9-d6265c28c7be {{(pid=63297) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1766.918562] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Triggering sync for uuid c1696ee9-cb48-414c-b0a0-b6fa2e880a81 {{(pid=63297) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1766.918724] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Triggering sync for uuid 927824d3-a98b-47b4-a850-1fb15fd0fbe4 {{(pid=63297) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1766.918888] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Triggering sync for uuid c257ee27-8d87-4fe6-a953-cc4af1ec36d6 {{(pid=63297) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1766.919051] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Triggering sync for uuid d7db24c1-35db-46d5-a406-fbb8c1c5d158 {{(pid=63297) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1766.919759] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" 
{{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.920012] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.920292] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.920536] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.920774] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.920953] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.921142] env[63297]: INFO nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] During sync_power_state the instance has a pending task (resize_migrating). Skip. 
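The _sync_power_states entries above show the periodic task finding 8 instances in the database against 7 on the hypervisor and taking a short per-instance lock before syncing each one. A minimal sketch of that per-resource locking pattern with oslo.concurrency, where query_and_sync is a hypothetical stand-in for the real per-instance sync callback:

# Illustrative per-instance lock pattern; query_and_sync is a placeholder,
# not Nova's implementation.
from oslo_concurrency import lockutils

def sync_power_state(instance_uuid, query_and_sync):
    # One named lock per instance UUID: operations on the same instance
    # serialize, while different instances can be synced in parallel.
    with lockutils.lock(instance_uuid):
        query_and_sync(instance_uuid)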
[ 1766.921358] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.921612] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.921860] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.922069] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.922309] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.922486] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.922705] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.922957] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.923197] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.936824] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e16fc4-b885-4586-a7cd-d800fab2a242 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.939626] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906722e4-966d-4340-a744-393a25949d25 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.942432] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8097abb7-c12b-4e27-9de5-e4164269914c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.945073] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f691bb-5c7d-4a71-a1a9-33cd8ccccbbe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.947841] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11d5b79-ce35-4e3a-a30c-5d757f4c3f7d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.957329] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d/6a99c537-e882-4c8c-b7c3-0861a5c0dc0d.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1766.957605] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d770261-a094-43ae-89fa-06feca808283 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.974718] env[63297]: DEBUG nova.compute.manager [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Received event network-changed-a995a9af-3d84-43dd-8695-17446ea38fc8 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1766.974912] env[63297]: DEBUG nova.compute.manager [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Refreshing instance network info cache due to event network-changed-a995a9af-3d84-43dd-8695-17446ea38fc8. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1766.975137] env[63297]: DEBUG oslo_concurrency.lockutils [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] Acquiring lock "refresh_cache-c257ee27-8d87-4fe6-a953-cc4af1ec36d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.975280] env[63297]: DEBUG oslo_concurrency.lockutils [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] Acquired lock "refresh_cache-c257ee27-8d87-4fe6-a953-cc4af1ec36d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.975473] env[63297]: DEBUG nova.network.neutron [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Refreshing network info cache for port a995a9af-3d84-43dd-8695-17446ea38fc8 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1766.991331] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1766.991331] env[63297]: value = "task-1698318" [ 1766.991331] env[63297]: _type = "Task" [ 1766.991331] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.003402] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698318, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.043292] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9ea1f1d7-87c6-4aaa-92dd-ef306b498472 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-c1696ee9-cb48-414c-b0a0-b6fa2e880a81-aa5d34db-425d-4449-81ca-fd7712125808" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.059s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.112139] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698317, 'name': CreateVM_Task, 'duration_secs': 0.379117} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.112304] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1767.112972] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.113160] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.113483] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1767.113755] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f2c29a7-076d-4dd9-860f-86eec5f803cf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.118310] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1767.118310] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52effacf-e662-971b-4985-921e9e477aff" [ 1767.118310] env[63297]: _type = "Task" [ 1767.118310] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.126802] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52effacf-e662-971b-4985-921e9e477aff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.491928] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.569s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.492376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.570s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.492780] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.572s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.502435] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.506487] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.583s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.506850] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.587s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.631340] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52effacf-e662-971b-4985-921e9e477aff, 'name': SearchDatastore_Task, 'duration_secs': 0.013556} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.631646] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.631878] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1767.632114] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.632262] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.632441] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1767.632706] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1d965c6-4e35-4024-b15d-02a7aac09b02 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.641108] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1767.641299] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1767.642045] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-463ba208-97ca-433f-b9a1-07797dcf00cb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.646943] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1767.646943] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522d5c88-043a-5926-46f1-e27e5cab190a" [ 1767.646943] env[63297]: _type = "Task" [ 1767.646943] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.654528] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522d5c88-043a-5926-46f1-e27e5cab190a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.754943] env[63297]: DEBUG nova.network.neutron [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Updated VIF entry in instance network info cache for port a995a9af-3d84-43dd-8695-17446ea38fc8. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1767.755463] env[63297]: DEBUG nova.network.neutron [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Updating instance_info_cache with network_info: [{"id": "a995a9af-3d84-43dd-8695-17446ea38fc8", "address": "fa:16:3e:8b:0b:e5", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa995a9af-3d", "ovs_interfaceid": "a995a9af-3d84-43dd-8695-17446ea38fc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.003079] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698318, 'name': ReconfigVM_Task, 'duration_secs': 0.695423} completed 
successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.003388] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d/6a99c537-e882-4c8c-b7c3-0861a5c0dc0d.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1768.003683] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance '6a99c537-e882-4c8c-b7c3-0861a5c0dc0d' progress to 50 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1768.067254] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Volume attach. Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1768.067496] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354022', 'volume_id': '2b804b7b-6e07-4705-9d8b-8906c4ad1af1', 'name': 'volume-2b804b7b-6e07-4705-9d8b-8906c4ad1af1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5c0eefd2-69d4-4100-93b9-d6265c28c7be', 'attached_at': '', 'detached_at': '', 'volume_id': '2b804b7b-6e07-4705-9d8b-8906c4ad1af1', 'serial': '2b804b7b-6e07-4705-9d8b-8906c4ad1af1'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1768.068645] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0fe492-4592-494d-9e16-1baca77db53e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.086132] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf1602a-901f-49ec-8153-f15f22ff0e11 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.110784] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] volume-2b804b7b-6e07-4705-9d8b-8906c4ad1af1/volume-2b804b7b-6e07-4705-9d8b-8906c4ad1af1.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1768.111125] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b590b2de-379d-45b4-bc5d-e7f8ea22fb10 {{(pid=63297) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.129450] env[63297]: DEBUG oslo_vmware.api [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1768.129450] env[63297]: value = "task-1698319" [ 1768.129450] env[63297]: _type = "Task" [ 1768.129450] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.137844] env[63297]: DEBUG oslo_vmware.api [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698319, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.157014] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522d5c88-043a-5926-46f1-e27e5cab190a, 'name': SearchDatastore_Task, 'duration_secs': 0.015277} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.157867] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f348fa9a-2fd3-450b-a8af-01a666ffd90a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.164227] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1768.164227] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528f2e63-c00d-ab65-160a-47f25a66f24b" [ 1768.164227] env[63297]: _type = "Task" [ 1768.164227] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.171922] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528f2e63-c00d-ab65-160a-47f25a66f24b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.259340] env[63297]: DEBUG oslo_concurrency.lockutils [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] Releasing lock "refresh_cache-c257ee27-8d87-4fe6-a953-cc4af1ec36d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.259614] env[63297]: DEBUG nova.compute.manager [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Received event network-changed-8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1768.259793] env[63297]: DEBUG nova.compute.manager [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Refreshing instance network info cache due to event network-changed-8b49c0c7-27b3-41da-b832-28195da8e8d1. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1768.260091] env[63297]: DEBUG oslo_concurrency.lockutils [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] Acquiring lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1768.260159] env[63297]: DEBUG oslo_concurrency.lockutils [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] Acquired lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1768.260321] env[63297]: DEBUG nova.network.neutron [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Refreshing network info cache for port 8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1768.510862] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca02e373-ad3b-48dd-a487-ba18c992a381 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.531878] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a7e278-7076-4d82-a266-c64aac3bb44b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.551851] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance '6a99c537-e882-4c8c-b7c3-0861a5c0dc0d' progress to 67 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1768.645442] env[63297]: DEBUG oslo_vmware.api [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698319, 'name': ReconfigVM_Task, 'duration_secs': 0.378121} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.645718] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Reconfigured VM instance instance-0000006c to attach disk [datastore1] volume-2b804b7b-6e07-4705-9d8b-8906c4ad1af1/volume-2b804b7b-6e07-4705-9d8b-8906c4ad1af1.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1768.651256] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2bd8f48-2566-423b-a5c7-576d9c815ced {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.680494] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528f2e63-c00d-ab65-160a-47f25a66f24b, 'name': SearchDatastore_Task, 'duration_secs': 0.010055} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.681995] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.682439] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d7db24c1-35db-46d5-a406-fbb8c1c5d158/d7db24c1-35db-46d5-a406-fbb8c1c5d158.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1768.682794] env[63297]: DEBUG oslo_vmware.api [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1768.682794] env[63297]: value = "task-1698320" [ 1768.682794] env[63297]: _type = "Task" [ 1768.682794] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.683227] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-092cc013-3004-4698-b3e8-aae0cfc353bb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.699948] env[63297]: DEBUG oslo_vmware.api [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698320, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.702074] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1768.702074] env[63297]: value = "task-1698321" [ 1768.702074] env[63297]: _type = "Task" [ 1768.702074] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.719563] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698321, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.789693] env[63297]: DEBUG oslo_concurrency.lockutils [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "interface-c1696ee9-cb48-414c-b0a0-b6fa2e880a81-aa5d34db-425d-4449-81ca-fd7712125808" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.789986] env[63297]: DEBUG oslo_concurrency.lockutils [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-c1696ee9-cb48-414c-b0a0-b6fa2e880a81-aa5d34db-425d-4449-81ca-fd7712125808" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.002798] env[63297]: DEBUG nova.network.neutron [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updated VIF entry in instance network info cache for port 8b49c0c7-27b3-41da-b832-28195da8e8d1. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1769.003189] env[63297]: DEBUG nova.network.neutron [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updating instance_info_cache with network_info: [{"id": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "address": "fa:16:3e:78:77:0e", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b49c0c7-27", "ovs_interfaceid": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1769.196049] env[63297]: DEBUG oslo_vmware.api [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698320, 'name': ReconfigVM_Task, 'duration_secs': 0.156069} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.196414] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354022', 'volume_id': '2b804b7b-6e07-4705-9d8b-8906c4ad1af1', 'name': 'volume-2b804b7b-6e07-4705-9d8b-8906c4ad1af1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5c0eefd2-69d4-4100-93b9-d6265c28c7be', 'attached_at': '', 'detached_at': '', 'volume_id': '2b804b7b-6e07-4705-9d8b-8906c4ad1af1', 'serial': '2b804b7b-6e07-4705-9d8b-8906c4ad1af1'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1769.211294] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698321, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.292903] env[63297]: DEBUG oslo_concurrency.lockutils [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.294152] env[63297]: DEBUG oslo_concurrency.lockutils [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.294489] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c737d06c-f4da-454b-b4fc-8f3121ec9262 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.313505] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3d9359-f6a0-4922-8046-3b8bfb8a47de {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.340759] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Reconfiguring VM to detach interface {{(pid=63297) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1769.342066] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7221920-3b45-4cec-896e-236194d8cba3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.361185] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1769.361185] env[63297]: value = "task-1698323" [ 1769.361185] env[63297]: _type = "Task" [ 1769.361185] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.370705] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.506302] env[63297]: DEBUG oslo_concurrency.lockutils [req-f77c553e-0256-4415-ab04-7d68d79acce1 req-f49ba79a-d64d-466b-8b76-052bab894244 service nova] Releasing lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1769.711667] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698321, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.931795} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.711971] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d7db24c1-35db-46d5-a406-fbb8c1c5d158/d7db24c1-35db-46d5-a406-fbb8c1c5d158.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1769.712228] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1769.712472] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d7a2400-0e30-4b0b-8770-b98fde4c7e3f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.718926] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1769.718926] env[63297]: value = "task-1698324" [ 1769.718926] env[63297]: _type = "Task" [ 1769.718926] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.726684] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698324, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.871484] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.184817] env[63297]: DEBUG nova.network.neutron [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Port c31a25f5-7d02-427f-932a-464daf59e755 binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1770.229403] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698324, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076119} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.229672] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1770.230454] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-777002cd-0be3-4267-8cf6-1d8d67a1d2ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.246020] env[63297]: DEBUG nova.objects.instance [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lazy-loading 'flavor' on Instance uuid 5c0eefd2-69d4-4100-93b9-d6265c28c7be {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1770.256344] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] d7db24c1-35db-46d5-a406-fbb8c1c5d158/d7db24c1-35db-46d5-a406-fbb8c1c5d158.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1770.256659] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-460f9207-7e6d-4c34-b47c-2a5937709a00 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.277413] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1770.277413] env[63297]: value = "task-1698325" [ 1770.277413] env[63297]: _type = "Task" [ 1770.277413] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.285801] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698325, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.371531] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.750997] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9df5a7e4-3f40-489f-9eb7-45de2d39bd9e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.281s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.752453] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.831s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.753407] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d140790a-56c1-437d-97c8-b9be05e02079 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.787311] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698325, 'name': ReconfigVM_Task, 'duration_secs': 0.316046} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.787583] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Reconfigured VM instance instance-00000071 to attach disk [datastore1] d7db24c1-35db-46d5-a406-fbb8c1c5d158/d7db24c1-35db-46d5-a406-fbb8c1c5d158.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1770.788270] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16caf57d-7428-4f6b-bf71-9bea67629ce9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.795078] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1770.795078] env[63297]: value = "task-1698326" [ 1770.795078] env[63297]: _type = "Task" [ 1770.795078] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.802698] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698326, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.872172] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.972458] env[63297]: DEBUG oslo_concurrency.lockutils [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.207946] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.207946] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.208184] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.262865] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.510s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.263128] env[63297]: DEBUG oslo_concurrency.lockutils [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.291s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.306091] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698326, 'name': Rename_Task, 'duration_secs': 0.136105} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.306382] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1771.306657] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a70b0ecd-9aa2-420e-896f-d0e9d06ad06b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.313261] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1771.313261] env[63297]: value = "task-1698328" [ 1771.313261] env[63297]: _type = "Task" [ 1771.313261] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.322476] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698328, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.373258] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.766143] env[63297]: INFO nova.compute.manager [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Detaching volume 2b804b7b-6e07-4705-9d8b-8906c4ad1af1 [ 1771.799206] env[63297]: INFO nova.virt.block_device [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Attempting to driver detach volume 2b804b7b-6e07-4705-9d8b-8906c4ad1af1 from mountpoint /dev/sdb [ 1771.799460] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Volume detach. 
Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1771.799644] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354022', 'volume_id': '2b804b7b-6e07-4705-9d8b-8906c4ad1af1', 'name': 'volume-2b804b7b-6e07-4705-9d8b-8906c4ad1af1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5c0eefd2-69d4-4100-93b9-d6265c28c7be', 'attached_at': '', 'detached_at': '', 'volume_id': '2b804b7b-6e07-4705-9d8b-8906c4ad1af1', 'serial': '2b804b7b-6e07-4705-9d8b-8906c4ad1af1'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1771.800540] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25408b3c-f8e1-4f6c-9f75-8db41d0fae6e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.825081] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b992662b-580d-4b10-a02c-1d056b954750 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.832518] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698328, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.834666] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96addb0a-7474-4a80-aae5-9d80a4760965 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.854445] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31960f79-45ba-4216-86aa-681257fa56db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.868574] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] The volume has not been displaced from its original location: [datastore1] volume-2b804b7b-6e07-4705-9d8b-8906c4ad1af1/volume-2b804b7b-6e07-4705-9d8b-8906c4ad1af1.vmdk. No consolidation needed. 
{{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1771.873792] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1771.876478] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b91ebce0-9be8-4666-9055-e60fe65545fd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.894719] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.895912] env[63297]: DEBUG oslo_vmware.api [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1771.895912] env[63297]: value = "task-1698329" [ 1771.895912] env[63297]: _type = "Task" [ 1771.895912] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.903127] env[63297]: DEBUG oslo_vmware.api [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698329, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.243298] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.243561] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.243770] env[63297]: DEBUG nova.network.neutron [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1772.335465] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698328, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.377346] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.405355] env[63297]: DEBUG oslo_vmware.api [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698329, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.831703] env[63297]: DEBUG oslo_vmware.api [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698328, 'name': PowerOnVM_Task, 'duration_secs': 1.113232} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.832033] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1772.832202] env[63297]: INFO nova.compute.manager [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Took 8.62 seconds to spawn the instance on the hypervisor. [ 1772.832344] env[63297]: DEBUG nova.compute.manager [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1772.833089] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2e74cd-237d-464e-9ba5-a5d9b4e5c882 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.877773] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.906205] env[63297]: DEBUG oslo_vmware.api [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698329, 'name': ReconfigVM_Task, 'duration_secs': 0.58344} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.906458] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1772.911159] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8403a49b-95d3-4a26-ac07-89aa91d00012 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.928769] env[63297]: DEBUG oslo_vmware.api [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1772.928769] env[63297]: value = "task-1698330" [ 1772.928769] env[63297]: _type = "Task" [ 1772.928769] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.936977] env[63297]: DEBUG oslo_vmware.api [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698330, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.959224] env[63297]: DEBUG nova.network.neutron [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance_info_cache with network_info: [{"id": "c31a25f5-7d02-427f-932a-464daf59e755", "address": "fa:16:3e:a0:49:93", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc31a25f5-7d", "ovs_interfaceid": "c31a25f5-7d02-427f-932a-464daf59e755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.353442] env[63297]: INFO nova.compute.manager [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Took 13.49 seconds to build 
instance. [ 1773.379322] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.438515] env[63297]: DEBUG oslo_vmware.api [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698330, 'name': ReconfigVM_Task, 'duration_secs': 0.136472} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.438815] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354022', 'volume_id': '2b804b7b-6e07-4705-9d8b-8906c4ad1af1', 'name': 'volume-2b804b7b-6e07-4705-9d8b-8906c4ad1af1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5c0eefd2-69d4-4100-93b9-d6265c28c7be', 'attached_at': '', 'detached_at': '', 'volume_id': '2b804b7b-6e07-4705-9d8b-8906c4ad1af1', 'serial': '2b804b7b-6e07-4705-9d8b-8906c4ad1af1'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1773.462721] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1773.855951] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5e982c69-eaad-485b-83a8-e5fffe34ba04 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.999s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.856403] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.933s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.856498] env[63297]: INFO nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1773.856680] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.880501] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.920890] env[63297]: DEBUG nova.compute.manager [req-16f7b383-426b-4701-96d6-e321be758d88 req-1348bf5c-6e2c-404e-9ba5-a2b4b8042b29 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Received event network-changed-8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1773.921108] env[63297]: DEBUG nova.compute.manager [req-16f7b383-426b-4701-96d6-e321be758d88 req-1348bf5c-6e2c-404e-9ba5-a2b4b8042b29 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Refreshing instance network info cache due to event network-changed-8b49c0c7-27b3-41da-b832-28195da8e8d1. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1773.921339] env[63297]: DEBUG oslo_concurrency.lockutils [req-16f7b383-426b-4701-96d6-e321be758d88 req-1348bf5c-6e2c-404e-9ba5-a2b4b8042b29 service nova] Acquiring lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1773.921485] env[63297]: DEBUG oslo_concurrency.lockutils [req-16f7b383-426b-4701-96d6-e321be758d88 req-1348bf5c-6e2c-404e-9ba5-a2b4b8042b29 service nova] Acquired lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.921643] env[63297]: DEBUG nova.network.neutron [req-16f7b383-426b-4701-96d6-e321be758d88 req-1348bf5c-6e2c-404e-9ba5-a2b4b8042b29 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Refreshing network info cache for port 8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1773.972620] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29063c9f-545b-43c6-9a65-3a6c168a49f5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.980553] env[63297]: DEBUG nova.objects.instance [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lazy-loading 'flavor' on Instance uuid 5c0eefd2-69d4-4100-93b9-d6265c28c7be {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1773.982168] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf761f4-48bb-4708-8111-c2bb37130f6f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.381988] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 
tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.649946] env[63297]: DEBUG nova.network.neutron [req-16f7b383-426b-4701-96d6-e321be758d88 req-1348bf5c-6e2c-404e-9ba5-a2b4b8042b29 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updated VIF entry in instance network info cache for port 8b49c0c7-27b3-41da-b832-28195da8e8d1. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1774.650481] env[63297]: DEBUG nova.network.neutron [req-16f7b383-426b-4701-96d6-e321be758d88 req-1348bf5c-6e2c-404e-9ba5-a2b4b8042b29 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updating instance_info_cache with network_info: [{"id": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "address": "fa:16:3e:78:77:0e", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b49c0c7-27", "ovs_interfaceid": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.882303] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.992539] env[63297]: DEBUG oslo_concurrency.lockutils [None req-83371473-d209-4387-bf59-d0a963c7aab9 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.728s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.154973] env[63297]: DEBUG oslo_concurrency.lockutils [req-16f7b383-426b-4701-96d6-e321be758d88 req-1348bf5c-6e2c-404e-9ba5-a2b4b8042b29 service nova] Releasing lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.184681] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d10a9f-03c9-4e17-b049-9e6b420483ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.205728] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1775.206746] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ece4f2-c159-4b67-800d-a641e6e330b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.213755] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance '6a99c537-e882-4c8c-b7c3-0861a5c0dc0d' progress to 83 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1775.241412] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.241654] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.241866] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.242072] env[63297]: 
DEBUG oslo_concurrency.lockutils [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.242244] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.244349] env[63297]: INFO nova.compute.manager [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Terminating instance [ 1775.247731] env[63297]: DEBUG nova.compute.manager [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1775.247941] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1775.248809] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb9065e-ec65-476a-b571-5307cb22c483 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.257871] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1775.258218] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5700521c-1c90-4e7f-8087-ca1336ba724e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.264722] env[63297]: DEBUG oslo_vmware.api [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1775.264722] env[63297]: value = "task-1698333" [ 1775.264722] env[63297]: _type = "Task" [ 1775.264722] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.276259] env[63297]: DEBUG oslo_vmware.api [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698333, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.383196] env[63297]: DEBUG oslo_vmware.api [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698323, 'name': ReconfigVM_Task, 'duration_secs': 5.866503} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.383494] env[63297]: DEBUG oslo_concurrency.lockutils [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.383776] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Reconfigured VM to detach interface {{(pid=63297) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1775.720327] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1775.720836] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1b06291-b561-40d0-a6e4-05aefd32cc46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.727990] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1775.727990] env[63297]: value = "task-1698334" [ 1775.727990] env[63297]: _type = "Task" [ 1775.727990] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.737745] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698334, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.774510] env[63297]: DEBUG oslo_vmware.api [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698333, 'name': PowerOffVM_Task, 'duration_secs': 0.289101} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.774769] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1775.774942] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1775.775217] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-210b0b5d-a705-4f38-9e9e-971f32d5f2d8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.929617] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1775.930072] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1775.930072] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Deleting the datastore file [datastore1] 5c0eefd2-69d4-4100-93b9-d6265c28c7be {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1775.930401] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a48f5a7c-b217-47fe-bc17-4280690237c9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.944527] env[63297]: DEBUG oslo_vmware.api [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1775.944527] env[63297]: value = "task-1698336" [ 1775.944527] env[63297]: _type = "Task" [ 1775.944527] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.952927] env[63297]: DEBUG oslo_vmware.api [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698336, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.238302] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698334, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.455145] env[63297]: DEBUG oslo_vmware.api [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698336, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.710163] env[63297]: DEBUG oslo_concurrency.lockutils [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.710365] env[63297]: DEBUG oslo_concurrency.lockutils [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1776.710549] env[63297]: DEBUG nova.network.neutron [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1776.738406] env[63297]: DEBUG oslo_vmware.api [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698334, 'name': PowerOnVM_Task, 'duration_secs': 0.780182} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.738652] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1776.738831] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb0cd3f-ff83-409f-a4bf-8e2aba92fcc5 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance '6a99c537-e882-4c8c-b7c3-0861a5c0dc0d' progress to 100 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1776.955819] env[63297]: DEBUG oslo_vmware.api [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698336, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.529079} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.955819] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1776.955819] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1776.955819] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1776.955819] env[63297]: INFO nova.compute.manager [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1776.956360] env[63297]: DEBUG oslo.service.loopingcall [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1776.956360] env[63297]: DEBUG nova.compute.manager [-] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1776.956360] env[63297]: DEBUG nova.network.neutron [-] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1777.444364] env[63297]: DEBUG nova.compute.manager [req-dc99208d-eb8f-4f60-97c4-950f1c317434 req-69158f5e-ffdf-4fed-adfd-d1efc279cf99 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Received event network-vif-deleted-191de685-dee5-4eac-944a-940a39615f0c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1777.444570] env[63297]: INFO nova.compute.manager [req-dc99208d-eb8f-4f60-97c4-950f1c317434 req-69158f5e-ffdf-4fed-adfd-d1efc279cf99 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Neutron deleted interface 191de685-dee5-4eac-944a-940a39615f0c; detaching it from the instance and deleting it from the info cache [ 1777.444732] env[63297]: DEBUG nova.network.neutron [req-dc99208d-eb8f-4f60-97c4-950f1c317434 req-69158f5e-ffdf-4fed-adfd-d1efc279cf99 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.665255] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.666287] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.801592] env[63297]: INFO nova.network.neutron [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Port aa5d34db-425d-4449-81ca-fd7712125808 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1777.802028] env[63297]: DEBUG nova.network.neutron [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updating instance_info_cache with network_info: [{"id": "da1879c7-6003-4193-ab1c-019184bded8d", "address": "fa:16:3e:c8:fa:05", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda1879c7-60", "ovs_interfaceid": "da1879c7-6003-4193-ab1c-019184bded8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.913032] env[63297]: DEBUG nova.compute.manager [req-8abbd291-8952-4574-b7ba-01ad3892e3f2 req-d147ce89-6ad0-4671-aba2-aa2a1a6d9fa0 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Received event network-changed-da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1777.913211] env[63297]: DEBUG nova.compute.manager [req-8abbd291-8952-4574-b7ba-01ad3892e3f2 req-d147ce89-6ad0-4671-aba2-aa2a1a6d9fa0 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Refreshing instance network info cache due to event network-changed-da1879c7-6003-4193-ab1c-019184bded8d. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1777.913404] env[63297]: DEBUG oslo_concurrency.lockutils [req-8abbd291-8952-4574-b7ba-01ad3892e3f2 req-d147ce89-6ad0-4671-aba2-aa2a1a6d9fa0 service nova] Acquiring lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1777.920472] env[63297]: DEBUG nova.network.neutron [-] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.948426] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58adca84-f023-475d-ad46-abb0bc93c333 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.958385] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4badf79-278f-4be3-b839-6eef0bce81a5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.988382] env[63297]: DEBUG nova.compute.manager [req-dc99208d-eb8f-4f60-97c4-950f1c317434 req-69158f5e-ffdf-4fed-adfd-d1efc279cf99 service nova] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Detach interface failed, port_id=191de685-dee5-4eac-944a-940a39615f0c, reason: Instance 5c0eefd2-69d4-4100-93b9-d6265c28c7be could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1778.169522] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.169522] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.169651] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.170282] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1778.171064] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df6da66-4eb3-4e1c-b9e6-a3823691ce0e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.180358] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5dbcd4a-8247-4f6b-a57e-93c1d8274c3b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1778.195900] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe86b102-e7b0-4b17-8b54-934f133f774c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.204810] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9626f9-fdad-4363-8491-f8fee53a62f5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.234668] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180036MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1778.234835] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.235051] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.304387] env[63297]: DEBUG oslo_concurrency.lockutils [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1778.307683] env[63297]: DEBUG oslo_concurrency.lockutils [req-8abbd291-8952-4574-b7ba-01ad3892e3f2 req-d147ce89-6ad0-4671-aba2-aa2a1a6d9fa0 service nova] Acquired lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1778.307880] env[63297]: DEBUG nova.network.neutron [req-8abbd291-8952-4574-b7ba-01ad3892e3f2 req-d147ce89-6ad0-4671-aba2-aa2a1a6d9fa0 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Refreshing network info cache for port da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1778.423085] env[63297]: INFO nova.compute.manager [-] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Took 1.47 seconds to deallocate network for instance. 
[ 1778.588184] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "interface-927824d3-a98b-47b4-a850-1fb15fd0fbe4-aa5d34db-425d-4449-81ca-fd7712125808" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.588479] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-927824d3-a98b-47b4-a850-1fb15fd0fbe4-aa5d34db-425d-4449-81ca-fd7712125808" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.588925] env[63297]: DEBUG nova.objects.instance [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'flavor' on Instance uuid 927824d3-a98b-47b4-a850-1fb15fd0fbe4 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1778.808187] env[63297]: DEBUG oslo_concurrency.lockutils [None req-74550df0-f9c6-4773-a736-7f0dde445222 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-c1696ee9-cb48-414c-b0a0-b6fa2e880a81-aa5d34db-425d-4449-81ca-fd7712125808" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.018s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.929031] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.015575] env[63297]: DEBUG nova.network.neutron [req-8abbd291-8952-4574-b7ba-01ad3892e3f2 req-d147ce89-6ad0-4671-aba2-aa2a1a6d9fa0 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updated VIF entry in instance network info cache for port da1879c7-6003-4193-ab1c-019184bded8d. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1779.016197] env[63297]: DEBUG nova.network.neutron [req-8abbd291-8952-4574-b7ba-01ad3892e3f2 req-d147ce89-6ad0-4671-aba2-aa2a1a6d9fa0 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updating instance_info_cache with network_info: [{"id": "da1879c7-6003-4193-ab1c-019184bded8d", "address": "fa:16:3e:c8:fa:05", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda1879c7-60", "ovs_interfaceid": "da1879c7-6003-4193-ab1c-019184bded8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.162845] env[63297]: DEBUG nova.network.neutron [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Port c31a25f5-7d02-427f-932a-464daf59e755 binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1779.162845] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.163057] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.163606] env[63297]: DEBUG nova.network.neutron [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1779.196319] env[63297]: DEBUG nova.objects.instance [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'pci_requests' on Instance uuid 927824d3-a98b-47b4-a850-1fb15fd0fbe4 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1779.245074] 
env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Applying migration context for instance 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d as it has an incoming, in-progress migration 1da2ee28-cb5f-4925-825a-0db5daf4f8d3. Migration status is reverting {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1779.246799] env[63297]: INFO nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating resource usage from migration 1da2ee28-cb5f-4925-825a-0db5daf4f8d3 [ 1779.268713] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.268888] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 10def566-2d1f-4ea2-9df5-ebf4d77f7b48 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.269065] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 5c0eefd2-69d4-4100-93b9-d6265c28c7be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.269202] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance c1696ee9-cb48-414c-b0a0-b6fa2e880a81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.269334] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 927824d3-a98b-47b4-a850-1fb15fd0fbe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.269460] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance c257ee27-8d87-4fe6-a953-cc4af1ec36d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.269649] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Migration 1da2ee28-cb5f-4925-825a-0db5daf4f8d3 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1779.269804] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.269938] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance d7db24c1-35db-46d5-a406-fbb8c1c5d158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.270178] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1779.270329] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2304MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1779.390054] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b7a8a0-484b-4e40-9357-1b41cc4f01a9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.398029] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da3a46a-af39-45d7-bf31-e70abe65b22f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.427735] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd37bb1a-96da-4b87-9b92-f7eedc2a07ba {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.435231] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b9bf65-ca52-452c-9ef1-4c526edf0d03 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.448263] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1779.519553] env[63297]: DEBUG oslo_concurrency.lockutils [req-8abbd291-8952-4574-b7ba-01ad3892e3f2 req-d147ce89-6ad0-4671-aba2-aa2a1a6d9fa0 service nova] Releasing lock "refresh_cache-c1696ee9-cb48-414c-b0a0-b6fa2e880a81" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.528799] env[63297]: DEBUG oslo_concurrency.lockutils [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Acquiring lock 
"10def566-2d1f-4ea2-9df5-ebf4d77f7b48" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.529082] env[63297]: DEBUG oslo_concurrency.lockutils [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.529300] env[63297]: DEBUG oslo_concurrency.lockutils [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Acquiring lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.529484] env[63297]: DEBUG oslo_concurrency.lockutils [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.529653] env[63297]: DEBUG oslo_concurrency.lockutils [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.531903] env[63297]: INFO nova.compute.manager [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Terminating instance [ 1779.534067] env[63297]: DEBUG nova.compute.manager [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1779.534322] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1779.535275] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9a6973-916d-4d1e-8f87-e26055b79e60 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.543660] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1779.543895] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae4b28d5-8105-4537-ad74-3d894e6f3b46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.550517] env[63297]: DEBUG oslo_vmware.api [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Waiting for the task: (returnval){ [ 1779.550517] env[63297]: value = "task-1698339" [ 1779.550517] env[63297]: _type = "Task" [ 1779.550517] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.559568] env[63297]: DEBUG oslo_vmware.api [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1698339, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.699443] env[63297]: DEBUG nova.objects.base [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Object Instance<927824d3-a98b-47b4-a850-1fb15fd0fbe4> lazy-loaded attributes: flavor,pci_requests {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1779.699686] env[63297]: DEBUG nova.network.neutron [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1779.779607] env[63297]: DEBUG nova.policy [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c21dc2049dd84f87a3b7cbcd7ba0ebcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48bfb708de5c4dd287530be2f8483ca9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1779.883618] env[63297]: DEBUG nova.network.neutron [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance_info_cache with network_info: [{"id": "c31a25f5-7d02-427f-932a-464daf59e755", "address": "fa:16:3e:a0:49:93", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc31a25f5-7d", "ovs_interfaceid": "c31a25f5-7d02-427f-932a-464daf59e755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.944130] env[63297]: DEBUG nova.compute.manager [req-ababf346-4c63-40c3-bb9c-dfc1645def1a req-5ba94d96-f67a-48d0-8f83-6af5b801adb5 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Received event network-changed-29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11132}} [ 1779.944334] env[63297]: DEBUG nova.compute.manager [req-ababf346-4c63-40c3-bb9c-dfc1645def1a req-5ba94d96-f67a-48d0-8f83-6af5b801adb5 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Refreshing instance network info cache due to event network-changed-29d488c8-0537-45c6-97ba-b70aec11141b. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1779.944540] env[63297]: DEBUG oslo_concurrency.lockutils [req-ababf346-4c63-40c3-bb9c-dfc1645def1a req-5ba94d96-f67a-48d0-8f83-6af5b801adb5 service nova] Acquiring lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.944682] env[63297]: DEBUG oslo_concurrency.lockutils [req-ababf346-4c63-40c3-bb9c-dfc1645def1a req-5ba94d96-f67a-48d0-8f83-6af5b801adb5 service nova] Acquired lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.944843] env[63297]: DEBUG nova.network.neutron [req-ababf346-4c63-40c3-bb9c-dfc1645def1a req-5ba94d96-f67a-48d0-8f83-6af5b801adb5 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Refreshing network info cache for port 29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1779.951214] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1780.060530] env[63297]: DEBUG oslo_vmware.api [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1698339, 'name': PowerOffVM_Task, 'duration_secs': 0.17848} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.060848] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1780.060960] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1780.061226] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8669e280-5590-4293-9128-ee0837849bc4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.137849] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1780.138127] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1780.138272] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Deleting the datastore file [datastore1] 10def566-2d1f-4ea2-9df5-ebf4d77f7b48 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1780.138543] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-688a1e46-1f35-4258-9e04-0d9ec5031e01 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.144728] env[63297]: DEBUG oslo_vmware.api [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Waiting for the task: (returnval){ [ 1780.144728] env[63297]: value = "task-1698341" [ 1780.144728] env[63297]: _type = "Task" [ 1780.144728] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.152395] env[63297]: DEBUG oslo_vmware.api [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1698341, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.386702] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.456123] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1780.456123] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.221s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.456319] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.527s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.456447] env[63297]: DEBUG nova.objects.instance [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lazy-loading 'resources' on Instance uuid 5c0eefd2-69d4-4100-93b9-d6265c28c7be {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1780.654515] env[63297]: DEBUG oslo_vmware.api [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1698341, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.659373] env[63297]: DEBUG nova.network.neutron [req-ababf346-4c63-40c3-bb9c-dfc1645def1a req-5ba94d96-f67a-48d0-8f83-6af5b801adb5 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updated VIF entry in instance network info cache for port 29d488c8-0537-45c6-97ba-b70aec11141b. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1780.659694] env[63297]: DEBUG nova.network.neutron [req-ababf346-4c63-40c3-bb9c-dfc1645def1a req-5ba94d96-f67a-48d0-8f83-6af5b801adb5 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updating instance_info_cache with network_info: [{"id": "29d488c8-0537-45c6-97ba-b70aec11141b", "address": "fa:16:3e:d4:87:f6", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29d488c8-05", "ovs_interfaceid": "29d488c8-0537-45c6-97ba-b70aec11141b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.890173] env[63297]: DEBUG nova.compute.manager [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63297) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1781.075290] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4a06a3-4fae-4e54-a9ba-3b0a8e4c93e7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.086387] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f90d264-1573-4c64-86a6-8a144b4d0411 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.113705] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76475819-e8b7-46ec-a2f3-67fa9af42c21 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.121008] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d768f95-4484-4f31-b80f-c8c20efc99fa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.138724] env[63297]: DEBUG nova.compute.provider_tree [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1781.158134] env[63297]: DEBUG oslo_vmware.api [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Task: {'id': task-1698341, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.707931} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.158501] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1781.158810] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1781.159098] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1781.159389] env[63297]: INFO nova.compute.manager [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1781.159760] env[63297]: DEBUG oslo.service.loopingcall [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1781.160047] env[63297]: DEBUG nova.compute.manager [-] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1781.160201] env[63297]: DEBUG nova.network.neutron [-] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1781.162685] env[63297]: DEBUG oslo_concurrency.lockutils [req-ababf346-4c63-40c3-bb9c-dfc1645def1a req-5ba94d96-f67a-48d0-8f83-6af5b801adb5 service nova] Releasing lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.291652] env[63297]: DEBUG nova.network.neutron [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Successfully updated port: aa5d34db-425d-4449-81ca-fd7712125808 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1781.452991] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1781.453268] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1781.453420] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1781.453660] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1781.611507] env[63297]: DEBUG nova.compute.manager [req-a9009748-ee33-4e4c-a970-8f3e3f363612 req-e5f7863c-d9dc-47b6-a3a7-82ec01a5849b service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Received event network-vif-deleted-bf9b1829-ba35-499e-993f-44fbd669974d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1781.611675] env[63297]: INFO nova.compute.manager [req-a9009748-ee33-4e4c-a970-8f3e3f363612 req-e5f7863c-d9dc-47b6-a3a7-82ec01a5849b service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Neutron deleted interface bf9b1829-ba35-499e-993f-44fbd669974d; detaching it from the instance and deleting it from the info cache [ 1781.611855] env[63297]: DEBUG nova.network.neutron [req-a9009748-ee33-4e4c-a970-8f3e3f363612 req-e5f7863c-d9dc-47b6-a3a7-82ec01a5849b service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.642060] env[63297]: DEBUG nova.scheduler.client.report [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 
tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1781.799737] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.799939] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.800134] env[63297]: DEBUG nova.network.neutron [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1781.957883] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Skipping network cache update for instance because it is being deleted. 
{{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 1781.971818] env[63297]: DEBUG nova.compute.manager [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Received event network-vif-plugged-aa5d34db-425d-4449-81ca-fd7712125808 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1781.972015] env[63297]: DEBUG oslo_concurrency.lockutils [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] Acquiring lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.972142] env[63297]: DEBUG oslo_concurrency.lockutils [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] Lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.972252] env[63297]: DEBUG oslo_concurrency.lockutils [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] Lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.972415] env[63297]: DEBUG nova.compute.manager [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] No waiting events found dispatching network-vif-plugged-aa5d34db-425d-4449-81ca-fd7712125808 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1781.972531] env[63297]: WARNING nova.compute.manager [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Received unexpected event network-vif-plugged-aa5d34db-425d-4449-81ca-fd7712125808 for instance with vm_state active and task_state None. [ 1781.972688] env[63297]: DEBUG nova.compute.manager [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Received event network-changed-aa5d34db-425d-4449-81ca-fd7712125808 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1781.972836] env[63297]: DEBUG nova.compute.manager [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Refreshing instance network info cache due to event network-changed-aa5d34db-425d-4449-81ca-fd7712125808. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1781.972994] env[63297]: DEBUG oslo_concurrency.lockutils [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] Acquiring lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.986238] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.986368] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquired lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.986508] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Forcefully refreshing network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1781.986654] env[63297]: DEBUG nova.objects.instance [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lazy-loading 'info_cache' on Instance uuid fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1781.988610] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.091294] env[63297]: DEBUG nova.network.neutron [-] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.114281] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b79f3b4-f348-47be-8016-abce63aca652 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.125918] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0326909-c720-42f5-94bb-4068fbbbb6e9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.145999] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.690s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.148720] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.160s {{(pid=63297) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.165299] env[63297]: DEBUG nova.compute.manager [req-a9009748-ee33-4e4c-a970-8f3e3f363612 req-e5f7863c-d9dc-47b6-a3a7-82ec01a5849b service nova] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Detach interface failed, port_id=bf9b1829-ba35-499e-993f-44fbd669974d, reason: Instance 10def566-2d1f-4ea2-9df5-ebf4d77f7b48 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1782.168949] env[63297]: INFO nova.scheduler.client.report [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Deleted allocations for instance 5c0eefd2-69d4-4100-93b9-d6265c28c7be [ 1782.338550] env[63297]: WARNING nova.network.neutron [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] 250ab20f-3057-41ed-bb65-926464a4e926 already exists in list: networks containing: ['250ab20f-3057-41ed-bb65-926464a4e926']. ignoring it [ 1782.593344] env[63297]: INFO nova.compute.manager [-] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Took 1.43 seconds to deallocate network for instance. [ 1782.620945] env[63297]: DEBUG nova.network.neutron [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updating instance_info_cache with network_info: [{"id": "29d488c8-0537-45c6-97ba-b70aec11141b", "address": "fa:16:3e:d4:87:f6", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29d488c8-05", "ovs_interfaceid": "29d488c8-0537-45c6-97ba-b70aec11141b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa5d34db-425d-4449-81ca-fd7712125808", "address": "fa:16:3e:7b:37:2f", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa5d34db-42", "ovs_interfaceid": "aa5d34db-425d-4449-81ca-fd7712125808", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.665971] env[63297]: DEBUG nova.objects.instance [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'migration_context' on Instance uuid 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1782.675996] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f4fcf47f-5fa4-44ee-8b02-07e6c5f7c97a tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "5c0eefd2-69d4-4100-93b9-d6265c28c7be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.434s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.100186] env[63297]: DEBUG oslo_concurrency.lockutils [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.123336] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.124605] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.124697] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.125030] env[63297]: DEBUG oslo_concurrency.lockutils [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] Acquired lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.125240] env[63297]: DEBUG nova.network.neutron [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] 
Refreshing network info cache for port aa5d34db-425d-4449-81ca-fd7712125808 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1783.126996] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd737815-a0ad-4751-b8ff-5fa0f0096ce4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.147787] env[63297]: DEBUG nova.virt.hardware [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1783.148113] env[63297]: DEBUG nova.virt.hardware [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1783.148371] env[63297]: DEBUG nova.virt.hardware [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1783.148618] env[63297]: DEBUG nova.virt.hardware [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1783.148813] env[63297]: DEBUG nova.virt.hardware [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1783.149016] env[63297]: DEBUG nova.virt.hardware [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1783.149310] env[63297]: DEBUG nova.virt.hardware [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1783.149522] env[63297]: DEBUG nova.virt.hardware [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1783.149740] env[63297]: DEBUG nova.virt.hardware [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1783.149923] env[63297]: DEBUG nova.virt.hardware [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1783.150153] env[63297]: DEBUG nova.virt.hardware [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1783.157018] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Reconfiguring VM to attach interface {{(pid=63297) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1783.158101] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-363f8854-d338-431f-ac83-a96c89904e8c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.178580] env[63297]: DEBUG oslo_vmware.api [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1783.178580] env[63297]: value = "task-1698343" [ 1783.178580] env[63297]: _type = "Task" [ 1783.178580] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.186900] env[63297]: DEBUG oslo_vmware.api [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698343, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.277769] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f603578-cbc1-4c26-a60b-524bd912b66d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.287346] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276007b6-b22f-4d5d-9b63-7ff64a92c5b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.329173] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320e65ca-f500-49c1-9e0a-c1bdc5d8204a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.337103] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fe0837-4022-44a6-9a47-b8164af0e1d8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.350972] env[63297]: DEBUG nova.compute.provider_tree [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1783.690767] env[63297]: DEBUG oslo_vmware.api [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698343, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.729316] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Updating instance_info_cache with network_info: [{"id": "1952432a-7339-4c5d-80fc-5dac66b659e2", "address": "fa:16:3e:80:2c:fe", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1952432a-73", "ovs_interfaceid": "1952432a-7339-4c5d-80fc-5dac66b659e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1783.854548] env[63297]: DEBUG nova.scheduler.client.report [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1783.922896] env[63297]: DEBUG nova.network.neutron [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updated VIF entry in instance network info cache for port aa5d34db-425d-4449-81ca-fd7712125808. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1783.923414] env[63297]: DEBUG nova.network.neutron [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updating instance_info_cache with network_info: [{"id": "29d488c8-0537-45c6-97ba-b70aec11141b", "address": "fa:16:3e:d4:87:f6", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29d488c8-05", "ovs_interfaceid": "29d488c8-0537-45c6-97ba-b70aec11141b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa5d34db-425d-4449-81ca-fd7712125808", "address": "fa:16:3e:7b:37:2f", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa5d34db-42", "ovs_interfaceid": "aa5d34db-425d-4449-81ca-fd7712125808", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.190650] env[63297]: DEBUG oslo_vmware.api [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698343, 'name': ReconfigVM_Task, 'duration_secs': 0.576877} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.197106] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.197330] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Reconfigured VM to attach interface {{(pid=63297) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1784.231856] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Releasing lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.232085] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Updated the network info_cache for instance {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1784.232321] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1784.232493] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1784.232668] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1784.232824] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1784.232963] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1784.426672] env[63297]: DEBUG oslo_concurrency.lockutils [req-eeb8a38d-f7fe-4cd5-8002-9de2b09d0f99 req-540adcce-acdf-4a1c-ac05-9905ef3f9928 service nova] Releasing lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.542972] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.543223] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.702206] env[63297]: DEBUG oslo_concurrency.lockutils [None req-a56f80b2-7e01-4b1d-95d3-d98006b50c71 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-927824d3-a98b-47b4-a850-1fb15fd0fbe4-aa5d34db-425d-4449-81ca-fd7712125808" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.114s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.866957] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.718s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.872477] env[63297]: DEBUG oslo_concurrency.lockutils [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.772s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.872687] env[63297]: DEBUG nova.objects.instance [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Lazy-loading 'resources' on Instance uuid 10def566-2d1f-4ea2-9df5-ebf4d77f7b48 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1785.045582] env[63297]: DEBUG nova.compute.manager [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1785.340907] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.341348] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.507483] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9291407d-8cae-44a6-b0dc-0f49a2c45114 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.515531] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aabbfd4-8d74-4ef3-986e-8d2dab68c403 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.547378] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51632432-357c-4160-9f05-40f1018efa10 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.557163] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e03383-02a3-4c1d-86d2-a693b70a6b49 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.563843] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.571903] env[63297]: DEBUG nova.compute.provider_tree [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1785.844692] env[63297]: DEBUG nova.compute.manager [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1786.075028] env[63297]: DEBUG nova.scheduler.client.report [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1786.311694] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "interface-927824d3-a98b-47b4-a850-1fb15fd0fbe4-aa5d34db-425d-4449-81ca-fd7712125808" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.311965] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-927824d3-a98b-47b4-a850-1fb15fd0fbe4-aa5d34db-425d-4449-81ca-fd7712125808" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.368442] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.406684] env[63297]: INFO nova.compute.manager [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Swapping old allocation on dict_keys(['88960333-a089-4255-ad72-5c02d57b2b35']) held by migration 1da2ee28-cb5f-4925-825a-0db5daf4f8d3 for instance [ 1786.429979] env[63297]: DEBUG nova.scheduler.client.report [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Overwriting current allocation {'allocations': {'88960333-a089-4255-ad72-5c02d57b2b35': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 166}}, 'project_id': '6e3dcd98ebe94a75a94322b03feba3b4', 'user_id': 'be5e07baf148496880261386dff8df76', 'consumer_generation': 1} on consumer 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d {{(pid=63297) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1786.533557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock 
"refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.533752] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.533930] env[63297]: DEBUG nova.network.neutron [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1786.581058] env[63297]: DEBUG oslo_concurrency.lockutils [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.709s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.584323] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.020s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.586640] env[63297]: INFO nova.compute.claims [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1786.600809] env[63297]: INFO nova.scheduler.client.report [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Deleted allocations for instance 10def566-2d1f-4ea2-9df5-ebf4d77f7b48 [ 1786.815528] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.815717] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.816734] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbb5162-21bb-4244-b6c8-1f8271693084 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.835296] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-22faeb51-26f7-4c60-9b04-4509e89cdaff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.864741] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Reconfiguring VM to detach interface {{(pid=63297) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1786.864907] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e95d3de9-5a31-45aa-a380-f751f8a292bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.886629] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1786.886629] env[63297]: value = "task-1698344" [ 1786.886629] env[63297]: _type = "Task" [ 1786.886629] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.895667] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.109452] env[63297]: DEBUG oslo_concurrency.lockutils [None req-628fbf2d-d3bd-460d-b057-a05662198e90 tempest-ServersV294TestFqdnHostnames-929024421 tempest-ServersV294TestFqdnHostnames-929024421-project-member] Lock "10def566-2d1f-4ea2-9df5-ebf4d77f7b48" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.580s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.309258] env[63297]: DEBUG nova.network.neutron [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance_info_cache with network_info: [{"id": "c31a25f5-7d02-427f-932a-464daf59e755", "address": "fa:16:3e:a0:49:93", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc31a25f5-7d", "ovs_interfaceid": "c31a25f5-7d02-427f-932a-464daf59e755", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.396074] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.705689] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0593cdd7-0263-4218-aeae-3b02c7b1dc2c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.714113] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd808e33-764e-467f-851b-72339371a0de {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.746295] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f19da70-8037-429b-8ece-775ac10cef13 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.754840] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d335e434-d3a5-4bb1-925d-12eb5a8b52ca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.769551] env[63297]: DEBUG nova.compute.provider_tree [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1787.812386] env[63297]: DEBUG oslo_concurrency.lockutils [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.813570] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58697d6e-6c49-4501-a17e-56edbe69d425 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.821668] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d070ae-7998-489b-a9da-f40faf4eeb31 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.897503] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.275020] env[63297]: DEBUG nova.scheduler.client.report [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1788.398231] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.778902] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.195s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.779482] env[63297]: DEBUG nova.compute.manager [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1788.782701] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.414s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.785641] env[63297]: INFO nova.compute.claims [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1788.899977] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task} progress is 14%. 
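The repeated "Inventory has not changed for provider ... based on inventory data" entries report the same three resource classes every cycle; an update is only sent to placement when the freshly computed inventory differs from what placement already holds. A minimal sketch of that comparison is below, assuming both sides are plain dicts keyed by resource class; the inventory values are copied from the log entry above, and this is not Nova's actual report-client code.

```python
# Inventory dict exactly as logged for provider 88960333-a089-4255-ad72-5c02d57b2b35.
LOCAL_INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181,
                'step_size': 1, 'allocation_ratio': 1.0},
}


def inventory_changed(reported: dict, current: dict) -> bool:
    """Return True only if a resource class was added, removed, or had a field altered."""
    if reported.keys() != current.keys():
        return True
    return any(reported[rc] != current[rc] for rc in reported)


# Placement already holds an identical copy, so no PUT is needed and the log
# simply notes that the inventory has not changed.
assert not inventory_changed(LOCAL_INVENTORY, dict(LOCAL_INVENTORY))
```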
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.914139] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1788.914139] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56361138-fc23-486e-a82e-c48f5426f9b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.924657] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1788.924657] env[63297]: value = "task-1698345" [ 1788.924657] env[63297]: _type = "Task" [ 1788.924657] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.936396] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698345, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.289649] env[63297]: DEBUG nova.compute.utils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1789.293158] env[63297]: DEBUG nova.compute.manager [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Allocating IP information in the background. 
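The "Using /dev/sd instead of None" entries come from the device-name helper (nova.compute.utils.get_next_device_name per the logged path): when the request does not name a boot device, the prefix falls back to /dev/sd and the first unused letter is chosen. The stand-in below captures that behaviour for illustration only; it is not the real helper and ignores the multi-letter /dev/sdaa case.

```python
import string


def next_device_name(used: set[str], prefix: str = "/dev/sd") -> str:
    """Pick the first /dev/sdX name that is not already attached to the instance."""
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise ValueError("no free device letters left")


# With a root disk already on /dev/sda, the next volume would land on /dev/sdb.
print(next_device_name({"/dev/sda"}))  # /dev/sdb
```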
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1789.293404] env[63297]: DEBUG nova.network.neutron [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1789.339824] env[63297]: DEBUG nova.policy [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ce4e0757c584ebdb556c79d3c0bd990', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2786fb3158214107a458dc08735ebeb1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1789.400896] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.435255] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698345, 'name': PowerOffVM_Task, 'duration_secs': 0.249156} completed successfully. 
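The "Policy check for network:attach_external_network failed" entry above also prints the credential dict that was evaluated: a plain member/reader token with is_admin False. The toy evaluator below shows why a rule of the shape "is_admin:True" (a common default for that rule; the deployment's actual policy string may differ) rejects those credentials. It is not oslo.policy, just the field:value comparison spelled out with a naive string check.

```python
def check_field_rule(rule: str, creds: dict) -> bool:
    """Evaluate a single 'field:value' check against a credential dict."""
    field, _, expected = rule.partition(":")
    # Naive string comparison; oslo.policy's real check types are richer.
    return str(creds.get(field)) == expected


creds = {
    "is_admin": False,
    "roles": ["member", "reader"],
    "project_id": "2786fb3158214107a458dc08735ebeb1",
}

# Fails for the tempest user above, so the port is created without the
# external-network capability and the build continues normally.
print(check_field_rule("is_admin:True", creds))  # False
```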
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.435872] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1789.436660] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1789.436890] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1789.437061] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1789.437249] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1789.437400] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1789.437544] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1789.437746] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1789.437964] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 
tempest-ServerActionsTestOtherB-717361991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1789.438079] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1789.438243] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1789.438415] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1789.443319] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d344c94-ff53-4f3c-9da1-c3bf09861aef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.459942] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1789.459942] env[63297]: value = "task-1698346" [ 1789.459942] env[63297]: _type = "Task" [ 1789.459942] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.468129] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698346, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.593601] env[63297]: DEBUG nova.network.neutron [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Successfully created port: be020d1c-0688-4a14-b8d7-7d51dc77c7d5 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1789.793744] env[63297]: DEBUG nova.compute.manager [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1789.904265] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task} progress is 14%. 
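The nova.virt.hardware entries above walk from "Flavor limits 0:0:0" and the 65536 maxima down to a single possible topology for the 1-vCPU m1.nano flavor. The sketch below reproduces just the enumeration step for illustration (it is not the hardware.py code): every (sockets, cores, threads) factorisation of the vCPU count that fits inside the maxima is a candidate, and for one vCPU only 1:1:1 survives, matching "Got 1 possible topologies".

```python
def possible_topologies(vcpus: int, max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536) -> list[tuple[int, int, int]]:
    """Enumerate every factorisation of vcpus that fits inside the given maxima."""
    found = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(max_cores, per_socket) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append((sockets, cores, threads))
    return found


# One vCPU: the only factorisation is 1 socket x 1 core x 1 thread.
print(possible_topologies(1))  # [(1, 1, 1)]
```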
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.938552] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0fc6ab-5a2b-42bd-9b65-dc5e78dfbc6d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.946645] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a48276-0a8f-4d87-8bbe-99a03724de5a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.980336] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce45334b-8bbb-4bc4-bf54-6cde27395a42 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.991366] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802421ad-2c42-47ea-aeb0-cce34677b75e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.994981] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698346, 'name': ReconfigVM_Task, 'duration_secs': 0.147982} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.996053] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f9767b-2410-4d69-b34c-a4204bbb5c3d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.007253] env[63297]: DEBUG nova.compute.provider_tree [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1790.026989] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1790.027200] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1790.027353] env[63297]: DEBUG nova.virt.hardware [None 
req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1790.027541] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1790.027688] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1790.027835] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1790.028052] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1790.028217] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1790.028387] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1790.028552] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1790.028727] env[63297]: DEBUG nova.virt.hardware [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1790.030072] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30e4351b-e54a-438b-82c0-ead244b23e61 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.036437] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1790.036437] env[63297]: value = 
"session[525a50a1-1200-7e56-fd66-4d6251696a0b]52015ff7-b1fb-b1f3-1e50-bf983b1c36ff" [ 1790.036437] env[63297]: _type = "Task" [ 1790.036437] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.044727] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52015ff7-b1fb-b1f3-1e50-bf983b1c36ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.401215] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.510586] env[63297]: DEBUG nova.scheduler.client.report [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1790.547510] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52015ff7-b1fb-b1f3-1e50-bf983b1c36ff, 'name': SearchDatastore_Task, 'duration_secs': 0.007772} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.552916] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1790.553208] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-784f6193-2089-48e3-bba8-c3d7ea04aaee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.572247] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1790.572247] env[63297]: value = "task-1698347" [ 1790.572247] env[63297]: _type = "Task" [ 1790.572247] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.580754] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698347, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.806360] env[63297]: DEBUG nova.compute.manager [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1790.833849] env[63297]: DEBUG nova.virt.hardware [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1790.834140] env[63297]: DEBUG nova.virt.hardware [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1790.834305] env[63297]: DEBUG nova.virt.hardware [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1790.834492] env[63297]: DEBUG nova.virt.hardware [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1790.834637] env[63297]: DEBUG nova.virt.hardware [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1790.834784] env[63297]: DEBUG nova.virt.hardware [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1790.834987] env[63297]: DEBUG nova.virt.hardware [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1790.835223] env[63297]: DEBUG nova.virt.hardware [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1790.835323] env[63297]: DEBUG nova.virt.hardware [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1790.835486] env[63297]: DEBUG nova.virt.hardware [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1790.835658] env[63297]: DEBUG nova.virt.hardware [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1790.836563] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7268d299-b4d1-49bf-84f7-63a6f0e0f649 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.846364] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487d37ea-5aad-4539-99a5-89ac935aeea2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.899256] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.015374] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.015776] env[63297]: DEBUG nova.compute.manager [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Start building networks asynchronously for instance. 
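The Acquiring/Acquired/"released" lines with their waited/held timings come from oslo.concurrency's named locks; "compute_resources" serialises the resource tracker and "refresh_cache-<uuid>" serialises per-instance network-cache refreshes. A minimal usage sketch of those helpers follows (it requires oslo.concurrency to be installed); the lock names are taken from the log and the function bodies are placeholders, not Nova code.

```python
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid: str) -> None:
    # Placeholder body: everything here runs with the named lock held, which is
    # what produces the "waited X.XXXs" / "held X.XXXs" lines in the log.
    print(f"claiming resources for {instance_uuid}")


def refresh_network_cache(instance_uuid: str) -> None:
    # The context-manager form used for the "refresh_cache-<uuid>" locks.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        print(f"rebuilding network info cache for {instance_uuid}")


claim_resources("d4b2da36-b0fd-47d2-95de-ef4b3f91330f")
refresh_network_cache("6a99c537-e882-4c8c-b7c3-0861a5c0dc0d")
```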
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1791.056089] env[63297]: DEBUG nova.compute.manager [req-48f72856-5dc9-4d3f-a3f2-3c8a3894e051 req-f62a2d6e-7d9d-4222-8995-bd0765e6d5e7 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Received event network-vif-plugged-be020d1c-0688-4a14-b8d7-7d51dc77c7d5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1791.056679] env[63297]: DEBUG oslo_concurrency.lockutils [req-48f72856-5dc9-4d3f-a3f2-3c8a3894e051 req-f62a2d6e-7d9d-4222-8995-bd0765e6d5e7 service nova] Acquiring lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.056898] env[63297]: DEBUG oslo_concurrency.lockutils [req-48f72856-5dc9-4d3f-a3f2-3c8a3894e051 req-f62a2d6e-7d9d-4222-8995-bd0765e6d5e7 service nova] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.057137] env[63297]: DEBUG oslo_concurrency.lockutils [req-48f72856-5dc9-4d3f-a3f2-3c8a3894e051 req-f62a2d6e-7d9d-4222-8995-bd0765e6d5e7 service nova] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.057260] env[63297]: DEBUG nova.compute.manager [req-48f72856-5dc9-4d3f-a3f2-3c8a3894e051 req-f62a2d6e-7d9d-4222-8995-bd0765e6d5e7 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] No waiting events found dispatching network-vif-plugged-be020d1c-0688-4a14-b8d7-7d51dc77c7d5 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1791.057429] env[63297]: WARNING nova.compute.manager [req-48f72856-5dc9-4d3f-a3f2-3c8a3894e051 req-f62a2d6e-7d9d-4222-8995-bd0765e6d5e7 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Received unexpected event network-vif-plugged-be020d1c-0688-4a14-b8d7-7d51dc77c7d5 for instance with vm_state building and task_state spawning. [ 1791.084526] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698347, 'name': ReconfigVM_Task, 'duration_secs': 0.220663} completed successfully. 
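The "Received event network-vif-plugged-...", "No waiting events found dispatching" and the WARNING about an unexpected event describe the event-waiter handshake: spawning code can register a waiter for a port's vif-plugged notification, and incoming Neutron events are matched against that registry; here the notification arrived with no registered waiter, which is only a warning while the instance is still building. The stripped-down registry below illustrates the pattern; it is not Nova's InstanceEvents class.

```python
import threading
import warnings


class InstanceEvents:
    """Tiny waiter registry: (instance_uuid, event_name) -> threading.Event."""

    def __init__(self) -> None:
        self._lock = threading.Lock()
        self._waiters: dict[tuple[str, str], threading.Event] = {}

    def prepare_for(self, instance_uuid: str, event_name: str) -> threading.Event:
        """Register interest in an event before triggering the external action."""
        with self._lock:
            return self._waiters.setdefault((instance_uuid, event_name),
                                            threading.Event())

    def pop_event(self, instance_uuid: str, event_name: str) -> None:
        """Dispatch an incoming event to its waiter, or warn if nobody is waiting."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Matches the log: no waiting events found, warn and carry on.
            warnings.warn(f"Received unexpected event {event_name} for {instance_uuid}")
        else:
            ev.set()


events = InstanceEvents()
events.pop_event("d4b2da36-b0fd-47d2-95de-ef4b3f91330f",
                 "network-vif-plugged-be020d1c-0688-4a14-b8d7-7d51dc77c7d5")
```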
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.084799] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1791.085583] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940e8fbd-ae12-42e8-82a0-b3a8bf253159 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.111347] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d/6a99c537-e882-4c8c-b7c3-0861a5c0dc0d.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1791.111652] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35d8e353-bc30-4e5b-8290-9b8e6a77e2b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.132348] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1791.132348] env[63297]: value = "task-1698348" [ 1791.132348] env[63297]: _type = "Task" [ 1791.132348] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.141429] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698348, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.143093] env[63297]: DEBUG nova.network.neutron [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Successfully updated port: be020d1c-0688-4a14-b8d7-7d51dc77c7d5 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1791.400521] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.520856] env[63297]: DEBUG nova.compute.utils [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1791.522406] env[63297]: DEBUG nova.compute.manager [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1791.522611] env[63297]: DEBUG nova.network.neutron [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1791.574764] env[63297]: DEBUG nova.policy [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2d8413d4aad4ed8a1fa9e436de117ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc81b0f87c64b2283eb0ece21fb31a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1791.642610] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698348, 'name': ReconfigVM_Task, 'duration_secs': 0.291626} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.643064] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d/6a99c537-e882-4c8c-b7c3-0861a5c0dc0d.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1791.643957] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26f3e2b-9bcf-4ae3-ad2f-512fdf7e3e3f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.646632] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "refresh_cache-d4b2da36-b0fd-47d2-95de-ef4b3f91330f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.646779] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired lock "refresh_cache-d4b2da36-b0fd-47d2-95de-ef4b3f91330f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.646907] env[63297]: DEBUG nova.network.neutron [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1791.668779] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48e318b-35cd-4ddf-b923-1a73eda911e6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.690974] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64da3e25-969e-4016-8b6d-aa99029bc73e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.714384] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9665ef-f25f-4cb7-b498-2d7ad3f0c781 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.721768] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1791.722011] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-538f8854-0060-48f4-9b22-3fcbb3fa9be8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.728841] env[63297]: DEBUG oslo_vmware.api [None 
req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1791.728841] env[63297]: value = "task-1698349" [ 1791.728841] env[63297]: _type = "Task" [ 1791.728841] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.736603] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698349, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.858611] env[63297]: DEBUG nova.network.neutron [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Successfully created port: 653160f6-b302-49dd-8655-4703b2ac1c6c {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1791.902704] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.026345] env[63297]: DEBUG nova.compute.manager [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1792.184995] env[63297]: DEBUG nova.network.neutron [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1792.242274] env[63297]: DEBUG oslo_vmware.api [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698349, 'name': PowerOnVM_Task, 'duration_secs': 0.386935} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.242703] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1792.330043] env[63297]: DEBUG nova.network.neutron [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Updating instance_info_cache with network_info: [{"id": "be020d1c-0688-4a14-b8d7-7d51dc77c7d5", "address": "fa:16:3e:b2:22:74", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe020d1c-06", "ovs_interfaceid": "be020d1c-0688-4a14-b8d7-7d51dc77c7d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.403081] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.531922] env[63297]: INFO nova.virt.block_device [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Booting with volume a946ca63-9887-4bf6-9a42-97242baeac81 at /dev/sda [ 1792.569100] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af4e30e5-f93b-4902-91a8-2ffa557483b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.579202] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54719370-d770-4bea-aedb-caeba29c0fdb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.610461] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9765034-3560-4b1f-8a4d-3f88ba5f7eaa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.621246] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979917b1-d518-46d4-be0b-7d0aec4e5755 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.654407] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8618882-eb23-46c6-8070-d365d0e5967d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.662565] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d582ee2c-5d85-4021-b7a0-a334e53d2f9f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.678344] env[63297]: DEBUG nova.virt.block_device [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating existing volume attachment record: a3210852-e740-459b-b8d9-ab1c90f3c53e {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1792.832404] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Releasing lock "refresh_cache-d4b2da36-b0fd-47d2-95de-ef4b3f91330f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.832728] env[63297]: DEBUG nova.compute.manager [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Instance network_info: |[{"id": "be020d1c-0688-4a14-b8d7-7d51dc77c7d5", "address": "fa:16:3e:b2:22:74", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe020d1c-06", "ovs_interfaceid": "be020d1c-0688-4a14-b8d7-7d51dc77c7d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1792.833196] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:22:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be020d1c-0688-4a14-b8d7-7d51dc77c7d5', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1792.841822] env[63297]: DEBUG oslo.service.loopingcall [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1792.842110] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1792.842350] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9bb83223-f086-4576-8186-c14143e40c33 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.867496] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1792.867496] env[63297]: value = "task-1698350" [ 1792.867496] env[63297]: _type = "Task" [ 1792.867496] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.878662] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698350, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.905380] env[63297]: DEBUG oslo_vmware.api [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698344, 'name': ReconfigVM_Task, 'duration_secs': 5.981373} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.905818] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.906086] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Reconfigured VM to detach interface {{(pid=63297) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1793.091971] env[63297]: DEBUG nova.compute.manager [req-b7a92f18-80b1-442f-bbac-259cc0eefaaf req-14d35e5a-20c4-4605-aa90-6a5f406f24e9 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Received event network-changed-be020d1c-0688-4a14-b8d7-7d51dc77c7d5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1793.092187] env[63297]: DEBUG nova.compute.manager [req-b7a92f18-80b1-442f-bbac-259cc0eefaaf req-14d35e5a-20c4-4605-aa90-6a5f406f24e9 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Refreshing instance network info cache due to event network-changed-be020d1c-0688-4a14-b8d7-7d51dc77c7d5. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1793.092515] env[63297]: DEBUG oslo_concurrency.lockutils [req-b7a92f18-80b1-442f-bbac-259cc0eefaaf req-14d35e5a-20c4-4605-aa90-6a5f406f24e9 service nova] Acquiring lock "refresh_cache-d4b2da36-b0fd-47d2-95de-ef4b3f91330f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.092688] env[63297]: DEBUG oslo_concurrency.lockutils [req-b7a92f18-80b1-442f-bbac-259cc0eefaaf req-14d35e5a-20c4-4605-aa90-6a5f406f24e9 service nova] Acquired lock "refresh_cache-d4b2da36-b0fd-47d2-95de-ef4b3f91330f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.093209] env[63297]: DEBUG nova.network.neutron [req-b7a92f18-80b1-442f-bbac-259cc0eefaaf req-14d35e5a-20c4-4605-aa90-6a5f406f24e9 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Refreshing network info cache for port be020d1c-0688-4a14-b8d7-7d51dc77c7d5 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1793.286987] env[63297]: INFO nova.compute.manager [None req-fa74df81-1cae-486a-a5b9-c420a4bf97dc tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance to original state: 'active' [ 1793.382063] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698350, 'name': CreateVM_Task, 'duration_secs': 0.408294} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.382314] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1793.383297] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.383566] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.384062] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1793.384444] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4605a741-a177-4d46-9f3f-678c3515ead5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.391282] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1793.391282] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]523aa092-6014-a065-9f56-7e9a8c4edd10" [ 1793.391282] env[63297]: _type = "Task" [ 1793.391282] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.405276] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523aa092-6014-a065-9f56-7e9a8c4edd10, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.405737] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.406124] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1793.406607] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.406794] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.407088] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1793.407475] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48092351-47a7-41b5-9827-468325e68385 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.419978] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1793.420322] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1793.422224] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38d8a870-db44-4bb1-aea0-5795d11d02a9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.429391] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1793.429391] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]525f686d-a1fe-25a7-d3f2-9f9034901d94" [ 1793.429391] env[63297]: _type = "Task" [ 1793.429391] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.438583] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525f686d-a1fe-25a7-d3f2-9f9034901d94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.700139] env[63297]: DEBUG nova.network.neutron [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Successfully updated port: 653160f6-b302-49dd-8655-4703b2ac1c6c {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1793.821015] env[63297]: DEBUG nova.network.neutron [req-b7a92f18-80b1-442f-bbac-259cc0eefaaf req-14d35e5a-20c4-4605-aa90-6a5f406f24e9 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Updated VIF entry in instance network info cache for port be020d1c-0688-4a14-b8d7-7d51dc77c7d5. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1793.821475] env[63297]: DEBUG nova.network.neutron [req-b7a92f18-80b1-442f-bbac-259cc0eefaaf req-14d35e5a-20c4-4605-aa90-6a5f406f24e9 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Updating instance_info_cache with network_info: [{"id": "be020d1c-0688-4a14-b8d7-7d51dc77c7d5", "address": "fa:16:3e:b2:22:74", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe020d1c-06", "ovs_interfaceid": "be020d1c-0688-4a14-b8d7-7d51dc77c7d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.941558] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]525f686d-a1fe-25a7-d3f2-9f9034901d94, 'name': SearchDatastore_Task, 'duration_secs': 0.010703} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.942027] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f3c2a44-3642-489b-a912-adc6e3008817 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.948663] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1793.948663] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c7d03f-95d5-2310-4e58-c058dad75d0c" [ 1793.948663] env[63297]: _type = "Task" [ 1793.948663] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.958083] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c7d03f-95d5-2310-4e58-c058dad75d0c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.202825] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.203074] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.203316] env[63297]: DEBUG nova.network.neutron [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1794.302816] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.302816] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquired lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.303059] env[63297]: DEBUG nova.network.neutron [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1794.323881] env[63297]: DEBUG oslo_concurrency.lockutils [req-b7a92f18-80b1-442f-bbac-259cc0eefaaf req-14d35e5a-20c4-4605-aa90-6a5f406f24e9 service nova] Releasing lock "refresh_cache-d4b2da36-b0fd-47d2-95de-ef4b3f91330f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.460897] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c7d03f-95d5-2310-4e58-c058dad75d0c, 'name': SearchDatastore_Task, 'duration_secs': 0.010576} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.461220] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.461578] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d4b2da36-b0fd-47d2-95de-ef4b3f91330f/d4b2da36-b0fd-47d2-95de-ef4b3f91330f.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1794.461742] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7367f53a-d416-429e-a032-3b8c77e9f71c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.469930] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1794.469930] env[63297]: value = "task-1698351" [ 1794.469930] env[63297]: _type = "Task" [ 1794.469930] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.478692] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698351, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.736772] env[63297]: DEBUG nova.network.neutron [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1794.772754] env[63297]: DEBUG oslo_concurrency.lockutils [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.773066] env[63297]: DEBUG oslo_concurrency.lockutils [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.773228] env[63297]: DEBUG oslo_concurrency.lockutils [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.773488] env[63297]: DEBUG oslo_concurrency.lockutils [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.773723] env[63297]: DEBUG oslo_concurrency.lockutils [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.776725] env[63297]: DEBUG nova.compute.manager [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1794.777263] env[63297]: DEBUG nova.virt.hardware [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1794.777640] env[63297]: DEBUG nova.virt.hardware [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1794.777790] env[63297]: DEBUG nova.virt.hardware [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1794.778057] env[63297]: DEBUG nova.virt.hardware [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1794.778200] env[63297]: DEBUG nova.virt.hardware [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1794.778341] env[63297]: DEBUG nova.virt.hardware [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1794.778552] env[63297]: DEBUG nova.virt.hardware [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1794.778713] env[63297]: DEBUG nova.virt.hardware [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1794.778880] env[63297]: DEBUG nova.virt.hardware [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Got 1 possible topologies 
{{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1794.779059] env[63297]: DEBUG nova.virt.hardware [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1794.779275] env[63297]: DEBUG nova.virt.hardware [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1794.779890] env[63297]: INFO nova.compute.manager [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Terminating instance [ 1794.782100] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3399414b-0a38-4ef2-89fb-702afa19f993 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.785457] env[63297]: DEBUG nova.compute.manager [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1794.785668] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1794.789089] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b7b908-8e9c-40db-a899-958c79fbd81b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.802837] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66f8e48-2b49-41a4-a4b5-91ff766929ed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.807620] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1794.810340] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5653f79f-5cfa-4d0f-922b-1ee4c7f615a8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.827142] env[63297]: DEBUG oslo_vmware.api [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1794.827142] env[63297]: value = "task-1698352" [ 
1794.827142] env[63297]: _type = "Task" [ 1794.827142] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.843257] env[63297]: DEBUG oslo_vmware.api [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698352, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.919333] env[63297]: DEBUG nova.network.neutron [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance_info_cache with network_info: [{"id": "653160f6-b302-49dd-8655-4703b2ac1c6c", "address": "fa:16:3e:d7:4e:28", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap653160f6-b3", "ovs_interfaceid": "653160f6-b302-49dd-8655-4703b2ac1c6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1794.984647] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698351, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510585} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.984950] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] d4b2da36-b0fd-47d2-95de-ef4b3f91330f/d4b2da36-b0fd-47d2-95de-ef4b3f91330f.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1794.985185] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1794.986143] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-60b00c42-c717-4ffe-9955-263f837f8246 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.995336] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1794.995336] env[63297]: value = "task-1698353" [ 1794.995336] env[63297]: _type = "Task" [ 1794.995336] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.005963] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698353, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.093263] env[63297]: INFO nova.network.neutron [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Port aa5d34db-425d-4449-81ca-fd7712125808 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
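The task-polling entries above (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task) all finish with a line of the form `Task: {'id': ..., 'name': ..., 'duration_secs': ...} completed successfully.` A minimal Python sketch for pulling task names and durations out of such lines follows; the regex is an assumption based on the layout seen in this log, not anything provided by Nova or oslo.vmware.

```python
import re

# Illustrative only: extract completed oslo.vmware task names and durations
# from log lines such as
#   "Task: {'id': task-1698351, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510585} completed successfully."
# The pattern is an assumption about this log's layout, not a Nova API.
TASK_RE = re.compile(
    r"Task: \{'id': (?P<id>[^,]+), 'name': (?P<name>\w+)"
    r"(?:, 'duration_secs': (?P<secs>[0-9.]+))?\} completed successfully"
)

def completed_tasks(lines):
    """Yield (task_id, task_name, duration_secs) for each completed task line."""
    for line in lines:
        m = TASK_RE.search(line)
        if m:
            secs = float(m.group('secs')) if m.group('secs') else None
            yield m.group('id'), m.group('name'), secs

# Example usage against a saved copy of this log (path is a placeholder):
# with open('nova-compute.log') as f:
#     for task_id, name, secs in completed_tasks(f):
#         print(task_id, name, secs)
```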
[ 1795.093650] env[63297]: DEBUG nova.network.neutron [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updating instance_info_cache with network_info: [{"id": "29d488c8-0537-45c6-97ba-b70aec11141b", "address": "fa:16:3e:d4:87:f6", "network": {"id": "250ab20f-3057-41ed-bb65-926464a4e926", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-362759298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48bfb708de5c4dd287530be2f8483ca9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29d488c8-05", "ovs_interfaceid": "29d488c8-0537-45c6-97ba-b70aec11141b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.122934] env[63297]: DEBUG nova.compute.manager [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Received event network-vif-plugged-653160f6-b302-49dd-8655-4703b2ac1c6c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1795.123115] env[63297]: DEBUG oslo_concurrency.lockutils [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] Acquiring lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.123327] env[63297]: DEBUG oslo_concurrency.lockutils [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] Lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.123495] env[63297]: DEBUG oslo_concurrency.lockutils [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] Lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.123665] env[63297]: DEBUG nova.compute.manager [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] No waiting events found dispatching network-vif-plugged-653160f6-b302-49dd-8655-4703b2ac1c6c {{(pid=63297) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1795.123830] env[63297]: WARNING nova.compute.manager [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Received unexpected event network-vif-plugged-653160f6-b302-49dd-8655-4703b2ac1c6c for instance with vm_state building and task_state spawning. [ 1795.123989] env[63297]: DEBUG nova.compute.manager [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Received event network-changed-653160f6-b302-49dd-8655-4703b2ac1c6c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1795.124155] env[63297]: DEBUG nova.compute.manager [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Refreshing instance network info cache due to event network-changed-653160f6-b302-49dd-8655-4703b2ac1c6c. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1795.124330] env[63297]: DEBUG oslo_concurrency.lockutils [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] Acquiring lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.169292] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.169649] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.169886] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.170092] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.170268] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock 
"6a99c537-e882-4c8c-b7c3-0861a5c0dc0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.172275] env[63297]: INFO nova.compute.manager [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Terminating instance [ 1795.174123] env[63297]: DEBUG nova.compute.manager [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1795.174335] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1795.174573] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5fd78d3-7fb6-4cf6-8e3d-643f975f4caa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.183893] env[63297]: DEBUG oslo_vmware.api [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1795.183893] env[63297]: value = "task-1698354" [ 1795.183893] env[63297]: _type = "Task" [ 1795.183893] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.192997] env[63297]: DEBUG oslo_vmware.api [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698354, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.342474] env[63297]: DEBUG oslo_vmware.api [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698352, 'name': PowerOffVM_Task, 'duration_secs': 0.270949} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.342803] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1795.343028] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1795.343324] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c001d185-7b6c-46c1-970e-b04a03ee3347 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.421474] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.421818] env[63297]: DEBUG nova.compute.manager [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Instance network_info: |[{"id": "653160f6-b302-49dd-8655-4703b2ac1c6c", "address": "fa:16:3e:d7:4e:28", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap653160f6-b3", "ovs_interfaceid": "653160f6-b302-49dd-8655-4703b2ac1c6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1795.422210] env[63297]: DEBUG oslo_concurrency.lockutils [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] Acquired lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.422393] env[63297]: DEBUG nova.network.neutron [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] 
Refreshing network info cache for port 653160f6-b302-49dd-8655-4703b2ac1c6c {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1795.423705] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:4e:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fc48e29b-113c-4849-850c-35435eab4052', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '653160f6-b302-49dd-8655-4703b2ac1c6c', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1795.432241] env[63297]: DEBUG oslo.service.loopingcall [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1795.435269] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1795.435807] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f505fb7-df67-4e87-962f-e1f8cbd6426a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.458161] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1795.458161] env[63297]: value = "task-1698356" [ 1795.458161] env[63297]: _type = "Task" [ 1795.458161] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.468042] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698356, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.480300] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1795.480528] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1795.480709] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Deleting the datastore file [datastore1] 927824d3-a98b-47b4-a850-1fb15fd0fbe4 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1795.481272] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-398de96d-b000-4c17-95c2-f8e6a5dd7ba8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.490683] env[63297]: DEBUG oslo_vmware.api [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1795.490683] env[63297]: value = "task-1698357" [ 1795.490683] env[63297]: _type = "Task" [ 1795.490683] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.503707] env[63297]: DEBUG oslo_vmware.api [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698357, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.508877] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698353, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080488} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.509143] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1795.509909] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28426717-43f2-4f8b-99e5-4b5f7806b9d8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.532338] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] d4b2da36-b0fd-47d2-95de-ef4b3f91330f/d4b2da36-b0fd-47d2-95de-ef4b3f91330f.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1795.534991] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afac3cf6-c365-49ce-916a-dd17045b6f71 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.556799] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1795.556799] env[63297]: value = "task-1698358" [ 1795.556799] env[63297]: _type = "Task" [ 1795.556799] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.566909] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698358, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.597016] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Releasing lock "refresh_cache-927824d3-a98b-47b4-a850-1fb15fd0fbe4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.694031] env[63297]: DEBUG oslo_vmware.api [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698354, 'name': PowerOffVM_Task, 'duration_secs': 0.258774} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.694456] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1795.694677] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Volume detach. Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1795.694894] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354020', 'volume_id': '0fa97388-d5df-41d2-befe-de9bd7aac345', 'name': 'volume-0fa97388-d5df-41d2-befe-de9bd7aac345', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '6a99c537-e882-4c8c-b7c3-0861a5c0dc0d', 'attached_at': '2024-12-10T17:32:21.000000', 'detached_at': '', 'volume_id': '0fa97388-d5df-41d2-befe-de9bd7aac345', 'serial': '0fa97388-d5df-41d2-befe-de9bd7aac345'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1795.695721] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2870af-111a-4f63-94cc-06528f75120c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.717696] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea53430-0a62-4963-9f72-355993bd5e56 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.726143] env[63297]: DEBUG nova.network.neutron [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updated VIF entry in instance network info cache for port 653160f6-b302-49dd-8655-4703b2ac1c6c. 
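[editor's note] The _detach_volume_vmdk record above logs the Cinder connection_info it was handed. The sketch below, with values copied from that record, pulls out the fields the detach path actually consumes; describe_vmdk_connection is an illustrative helper, not Nova code.

connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-354020',   # managed-object ref of the volume's backing ("shadow") VM
        'volume_id': '0fa97388-d5df-41d2-befe-de9bd7aac345',
        'name': 'volume-0fa97388-d5df-41d2-befe-de9bd7aac345',
        'access_mode': 'rw',
        'encrypted': False,
    },
}

def describe_vmdk_connection(info: dict) -> str:
    """Summarise the fields the vmdk detach path reads from connection_info."""
    data = info['data']
    return (f"volume {data['volume_id']} backed by {data['name']}.vmdk "
            f"(backing ref {data['volume']}, access {data['access_mode']})")

print(describe_vmdk_connection(connection_info))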
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1795.726550] env[63297]: DEBUG nova.network.neutron [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance_info_cache with network_info: [{"id": "653160f6-b302-49dd-8655-4703b2ac1c6c", "address": "fa:16:3e:d7:4e:28", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap653160f6-b3", "ovs_interfaceid": "653160f6-b302-49dd-8655-4703b2ac1c6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.728281] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f86d1f-3132-473e-905b-4615c237afc0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.752195] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404b46b1-6b55-404f-9df4-1c6ae817fc75 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.770554] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] The volume has not been displaced from its original location: [datastore1] volume-0fa97388-d5df-41d2-befe-de9bd7aac345/volume-0fa97388-d5df-41d2-befe-de9bd7aac345.vmdk. No consolidation needed. 
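[editor's note] The instance_info_cache update above carries the full Neutron VIF model as JSON. Below is a trimmed copy of that entry plus a small parser for the pieces consumers usually read (fixed IPs, MTU, segmentation ID); fixed_ips is an illustrative helper, not part of nova.network.neutron.

vif = {
    "id": "653160f6-b302-49dd-8655-4703b2ac1c6c",
    "address": "fa:16:3e:d7:4e:28",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.10", "type": "fixed",
                     "floating_ips": []}],
        }],
        "meta": {"mtu": 8950},
    },
    "type": "ovs",
    "details": {"segmentation_id": 958},
    "devname": "tap653160f6-b3",
}

def fixed_ips(vif_entry: dict):
    """Collect every fixed IP across all subnets of a VIF entry."""
    return [ip["address"]
            for subnet in vif_entry["network"]["subnets"]
            for ip in subnet["ips"]
            if ip["type"] == "fixed"]

print(fixed_ips(vif),
      vif["network"]["meta"]["mtu"],
      vif["details"]["segmentation_id"])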
{{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1795.776362] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1795.776706] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5a03c8e-d1eb-4e04-9e8d-ed8b2c5d63d1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.807156] env[63297]: DEBUG oslo_vmware.api [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1795.807156] env[63297]: value = "task-1698359" [ 1795.807156] env[63297]: _type = "Task" [ 1795.807156] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.817088] env[63297]: DEBUG oslo_vmware.api [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698359, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.969045] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698356, 'name': CreateVM_Task, 'duration_secs': 0.449787} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.969278] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1795.970033] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354025', 'volume_id': 'a946ca63-9887-4bf6-9a42-97242baeac81', 'name': 'volume-a946ca63-9887-4bf6-9a42-97242baeac81', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b6a1f66d-783e-4263-b9c4-a4d517ce2923', 'attached_at': '', 'detached_at': '', 'volume_id': 'a946ca63-9887-4bf6-9a42-97242baeac81', 'serial': 'a946ca63-9887-4bf6-9a42-97242baeac81'}, 'mount_device': '/dev/sda', 'disk_bus': None, 'attachment_id': 'a3210852-e740-459b-b8d9-ab1c90f3c53e', 'guest_format': None, 'device_type': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=63297) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1795.970318] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Root volume attach. 
Driver type: vmdk {{(pid=63297) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1795.971176] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487ad8aa-3b72-42af-bb5c-0d317c73d1fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.979799] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e17903f-7af3-4ba8-8095-3f19a6df49d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.986555] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faaa630a-f19b-4391-86bc-afe48477bbcd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.996269] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-50544f0b-211d-4a82-90db-b45d0afad104 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.007566] env[63297]: DEBUG oslo_vmware.api [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698357, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17387} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.008988] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1796.009219] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1796.009403] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1796.009583] env[63297]: INFO nova.compute.manager [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1796.009821] env[63297]: DEBUG oslo.service.loopingcall [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
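[editor's note] The looping-call record above waits on _deallocate_network_with_retries, i.e. network deallocation is retried rather than attempted once. The plain-Python retry wrapper below sketches that pattern only; it is not the oslo.service implementation, and call_with_retries plus the commented callee are hypothetical names.

import time

def call_with_retries(func, max_attempts=3, interval=1.0):
    """Re-invoke func until it succeeds or attempts are exhausted."""
    for attempt in range(1, max_attempts + 1):
        try:
            return func()
        except Exception as exc:  # real code narrows this to retriable errors
            if attempt == max_attempts:
                raise
            print(f"attempt {attempt} failed ({exc}); retrying in {interval}s")
            time.sleep(interval)

# Usage sketch (hypothetical callee):
# call_with_retries(lambda: deallocate_network(instance))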
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1796.010093] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1796.010093] env[63297]: value = "task-1698360" [ 1796.010093] env[63297]: _type = "Task" [ 1796.010093] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.010290] env[63297]: DEBUG nova.compute.manager [-] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1796.010379] env[63297]: DEBUG nova.network.neutron [-] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1796.021208] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698360, 'name': RelocateVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.070487] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698358, 'name': ReconfigVM_Task, 'duration_secs': 0.315649} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.070728] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Reconfigured VM instance instance-00000072 to attach disk [datastore1] d4b2da36-b0fd-47d2-95de-ef4b3f91330f/d4b2da36-b0fd-47d2-95de-ef4b3f91330f.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1796.071415] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-907879d6-e367-4f94-b4a1-9d17a7be4396 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.079125] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1796.079125] env[63297]: value = "task-1698361" [ 1796.079125] env[63297]: _type = "Task" [ 1796.079125] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.093427] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698361, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.100500] env[63297]: DEBUG oslo_concurrency.lockutils [None req-ca0903dc-679f-4c9f-a817-868e68702793 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "interface-927824d3-a98b-47b4-a850-1fb15fd0fbe4-aa5d34db-425d-4449-81ca-fd7712125808" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.788s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.232686] env[63297]: DEBUG oslo_concurrency.lockutils [req-36cb1c18-cedc-4be7-ba66-ad37beacec25 req-6c270672-eadd-4bcc-aa62-bbe62dd095bd service nova] Releasing lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.317624] env[63297]: DEBUG oslo_vmware.api [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698359, 'name': ReconfigVM_Task, 'duration_secs': 0.267615} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.317915] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1796.323476] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-625abe73-4d10-4b48-a83e-344373364a1c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.343246] env[63297]: DEBUG oslo_vmware.api [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1796.343246] env[63297]: value = "task-1698362" [ 1796.343246] env[63297]: _type = "Task" [ 1796.343246] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.354621] env[63297]: DEBUG oslo_vmware.api [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698362, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.522407] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698360, 'name': RelocateVM_Task, 'duration_secs': 0.466347} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.522700] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Volume attach. 
Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1796.522902] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354025', 'volume_id': 'a946ca63-9887-4bf6-9a42-97242baeac81', 'name': 'volume-a946ca63-9887-4bf6-9a42-97242baeac81', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b6a1f66d-783e-4263-b9c4-a4d517ce2923', 'attached_at': '', 'detached_at': '', 'volume_id': 'a946ca63-9887-4bf6-9a42-97242baeac81', 'serial': 'a946ca63-9887-4bf6-9a42-97242baeac81'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1796.523697] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519831c4-e06f-4ee6-8132-4d4570d5bb25 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.540164] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8210f2-47da-4bd6-ac0c-0b46f143357c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.562554] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] volume-a946ca63-9887-4bf6-9a42-97242baeac81/volume-a946ca63-9887-4bf6-9a42-97242baeac81.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1796.562851] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1397839a-a84d-47f1-87ab-7686d18bdfe6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.585790] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1796.585790] env[63297]: value = "task-1698363" [ 1796.585790] env[63297]: _type = "Task" [ 1796.585790] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.592921] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698361, 'name': Rename_Task, 'duration_secs': 0.189305} completed successfully. 
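[editor's note] The attach path above reconfigures the instance (ReconfigVM_Task) to add the volume's vmdk "with type thin". The dict-shaped sketch below only conveys what such a device-change spec carries; the real driver builds vSphere SOAP objects, so build_attach_disk_spec and its dict layout are illustrative, not the actual API.

def build_attach_disk_spec(vmdk_path: str, disk_type: str = "thin") -> dict:
    """Illustrative, dict-shaped stand-in for the device-change spec sent
    with ReconfigVM_Task when attaching an existing vmdk to a VM."""
    return {
        "operation": "add",
        "device": {
            "kind": "VirtualDisk",
            "backing": {
                "fileName": vmdk_path,
                "thinProvisioned": disk_type == "thin",
            },
        },
    }

spec = build_attach_disk_spec(
    "[datastore1] volume-a946ca63-9887-4bf6-9a42-97242baeac81/"
    "volume-a946ca63-9887-4bf6-9a42-97242baeac81.vmdk")
print(spec)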
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.593585] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1796.593877] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edb50269-c6e3-4fc6-bde7-81b5de0bb90e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.599894] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698363, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.603225] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1796.603225] env[63297]: value = "task-1698364" [ 1796.603225] env[63297]: _type = "Task" [ 1796.603225] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.612469] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698364, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.856212] env[63297]: DEBUG oslo_vmware.api [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698362, 'name': ReconfigVM_Task, 'duration_secs': 0.152275} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.856586] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354020', 'volume_id': '0fa97388-d5df-41d2-befe-de9bd7aac345', 'name': 'volume-0fa97388-d5df-41d2-befe-de9bd7aac345', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '6a99c537-e882-4c8c-b7c3-0861a5c0dc0d', 'attached_at': '2024-12-10T17:32:21.000000', 'detached_at': '', 'volume_id': '0fa97388-d5df-41d2-befe-de9bd7aac345', 'serial': '0fa97388-d5df-41d2-befe-de9bd7aac345'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1796.856912] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1796.857743] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c3fb83-b02b-4fd6-9a21-95a937db21fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.866223] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1796.866548] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96daf9ef-9b4a-401f-abf4-e4064ed693cb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.048088] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1797.048267] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1797.048503] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleting the datastore file [datastore1] 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1797.048924] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7deb71ab-1a0c-42ef-b447-67a68b35aade {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.058267] env[63297]: DEBUG oslo_vmware.api [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1797.058267] env[63297]: value = "task-1698366" [ 1797.058267] env[63297]: _type = "Task" [ 1797.058267] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.069875] env[63297]: DEBUG oslo_vmware.api [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698366, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.096350] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698363, 'name': ReconfigVM_Task, 'duration_secs': 0.345513} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.096671] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Reconfigured VM instance instance-00000073 to attach disk [datastore1] volume-a946ca63-9887-4bf6-9a42-97242baeac81/volume-a946ca63-9887-4bf6-9a42-97242baeac81.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1797.101458] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78ac4f88-6386-482d-be43-3596590caf51 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.119895] env[63297]: DEBUG oslo_vmware.api [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698364, 'name': PowerOnVM_Task, 'duration_secs': 0.502855} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.121179] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1797.121396] env[63297]: INFO nova.compute.manager [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Took 6.31 seconds to spawn the instance on the hypervisor. 
[ 1797.121571] env[63297]: DEBUG nova.compute.manager [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1797.121897] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1797.121897] env[63297]: value = "task-1698367" [ 1797.121897] env[63297]: _type = "Task" [ 1797.121897] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.122585] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82caeb9e-bb63-48a0-ba93-2f418a713028 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.132975] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698367, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.235704] env[63297]: DEBUG nova.network.neutron [-] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.279023] env[63297]: DEBUG nova.compute.manager [req-2744443e-c0b7-4f1b-a23b-2cb389a08fbc req-22f9b01e-024f-4d17-9bdb-ba38f3e447da service nova] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Received event network-vif-deleted-29d488c8-0537-45c6-97ba-b70aec11141b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1797.568870] env[63297]: DEBUG oslo_vmware.api [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172272} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.569156] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1797.569421] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1797.569662] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1797.569887] env[63297]: INFO nova.compute.manager [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Took 2.40 seconds to destroy the instance on the hypervisor. [ 1797.570179] env[63297]: DEBUG oslo.service.loopingcall [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1797.570443] env[63297]: DEBUG nova.compute.manager [-] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1797.570565] env[63297]: DEBUG nova.network.neutron [-] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1797.636142] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698367, 'name': ReconfigVM_Task, 'duration_secs': 0.135878} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.636569] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354025', 'volume_id': 'a946ca63-9887-4bf6-9a42-97242baeac81', 'name': 'volume-a946ca63-9887-4bf6-9a42-97242baeac81', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b6a1f66d-783e-4263-b9c4-a4d517ce2923', 'attached_at': '', 'detached_at': '', 'volume_id': 'a946ca63-9887-4bf6-9a42-97242baeac81', 'serial': 'a946ca63-9887-4bf6-9a42-97242baeac81'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1797.637166] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be04f927-d74c-4f1f-8b65-367fa5cbb043 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.647867] env[63297]: INFO nova.compute.manager [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Took 12.10 seconds to build instance. [ 1797.651016] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1797.651016] env[63297]: value = "task-1698368" [ 1797.651016] env[63297]: _type = "Task" [ 1797.651016] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.662419] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698368, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.738632] env[63297]: INFO nova.compute.manager [-] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Took 1.73 seconds to deallocate network for instance. [ 1798.156202] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f064c24c-5521-4159-a624-68c12f3160ac tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.612s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.162408] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698368, 'name': Rename_Task, 'duration_secs': 0.160875} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.162740] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1798.163036] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e6c6dd7-274e-4a67-a4df-a9f3f03e414c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.171174] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1798.171174] env[63297]: value = "task-1698369" [ 1798.171174] env[63297]: _type = "Task" [ 1798.171174] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.179482] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698369, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.245843] env[63297]: DEBUG oslo_concurrency.lockutils [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.245843] env[63297]: DEBUG oslo_concurrency.lockutils [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.245843] env[63297]: DEBUG nova.objects.instance [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'resources' on Instance uuid 927824d3-a98b-47b4-a850-1fb15fd0fbe4 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1798.682105] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698369, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.759452] env[63297]: DEBUG nova.network.neutron [-] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1798.869180] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764f95ec-0590-4b7a-8923-2164b46fbd47 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.878502] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76063c1-a665-459b-83c7-24f2a333ad4d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.911033] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8fd807-b61e-4f77-a847-c1d4f2dc6335 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.919717] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e1b759-e6ed-46bf-a87a-6112eb3c5512 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.935484] env[63297]: DEBUG nova.compute.provider_tree [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1799.183624] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698369, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.262663] env[63297]: INFO nova.compute.manager [-] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Took 1.69 seconds to deallocate network for instance. [ 1799.308113] env[63297]: DEBUG nova.compute.manager [req-b46ea266-c8cf-47df-ae61-acf48edfc46f req-b49930f7-bd8d-4015-bb07-8a3990562887 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Received event network-changed-be020d1c-0688-4a14-b8d7-7d51dc77c7d5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1799.308325] env[63297]: DEBUG nova.compute.manager [req-b46ea266-c8cf-47df-ae61-acf48edfc46f req-b49930f7-bd8d-4015-bb07-8a3990562887 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Refreshing instance network info cache due to event network-changed-be020d1c-0688-4a14-b8d7-7d51dc77c7d5. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1799.308539] env[63297]: DEBUG oslo_concurrency.lockutils [req-b46ea266-c8cf-47df-ae61-acf48edfc46f req-b49930f7-bd8d-4015-bb07-8a3990562887 service nova] Acquiring lock "refresh_cache-d4b2da36-b0fd-47d2-95de-ef4b3f91330f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.308682] env[63297]: DEBUG oslo_concurrency.lockutils [req-b46ea266-c8cf-47df-ae61-acf48edfc46f req-b49930f7-bd8d-4015-bb07-8a3990562887 service nova] Acquired lock "refresh_cache-d4b2da36-b0fd-47d2-95de-ef4b3f91330f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.308842] env[63297]: DEBUG nova.network.neutron [req-b46ea266-c8cf-47df-ae61-acf48edfc46f req-b49930f7-bd8d-4015-bb07-8a3990562887 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Refreshing network info cache for port be020d1c-0688-4a14-b8d7-7d51dc77c7d5 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1799.439242] env[63297]: DEBUG nova.scheduler.client.report [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1799.683397] env[63297]: DEBUG oslo_vmware.api [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698369, 'name': PowerOnVM_Task, 'duration_secs': 1.106571} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.683693] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1799.683902] env[63297]: INFO nova.compute.manager [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Took 4.91 seconds to spawn the instance on the hypervisor. 
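[editor's note] The inventory record above is what the resource tracker reports to Placement for provider 88960333-a089-4255-ad72-5c02d57b2b35. Schedulable capacity per resource class is (total - reserved) * allocation_ratio; the short computation below reproduces that from the logged numbers (the schedulable helper is illustrative).

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def schedulable(inv: dict) -> dict:
    """Capacity Placement can hand out: (total - reserved) * allocation_ratio."""
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(schedulable(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}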
[ 1799.684094] env[63297]: DEBUG nova.compute.manager [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1799.684928] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd0d23b-b4e2-4345-bd4f-2181d93c5a0b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.808989] env[63297]: INFO nova.compute.manager [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Took 0.55 seconds to detach 1 volumes for instance. [ 1799.944405] env[63297]: DEBUG oslo_concurrency.lockutils [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.699s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.965951] env[63297]: INFO nova.scheduler.client.report [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Deleted allocations for instance 927824d3-a98b-47b4-a850-1fb15fd0fbe4 [ 1800.024236] env[63297]: DEBUG nova.network.neutron [req-b46ea266-c8cf-47df-ae61-acf48edfc46f req-b49930f7-bd8d-4015-bb07-8a3990562887 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Updated VIF entry in instance network info cache for port be020d1c-0688-4a14-b8d7-7d51dc77c7d5. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1800.024627] env[63297]: DEBUG nova.network.neutron [req-b46ea266-c8cf-47df-ae61-acf48edfc46f req-b49930f7-bd8d-4015-bb07-8a3990562887 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Updating instance_info_cache with network_info: [{"id": "be020d1c-0688-4a14-b8d7-7d51dc77c7d5", "address": "fa:16:3e:b2:22:74", "network": {"id": "c43c440b-993c-4790-bd7b-40eb301c2675", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1003102922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2786fb3158214107a458dc08735ebeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe020d1c-06", "ovs_interfaceid": "be020d1c-0688-4a14-b8d7-7d51dc77c7d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.204038] env[63297]: INFO nova.compute.manager [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Took 13.85 seconds to build instance. 
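[editor's note] Unlike the earlier VIF entry, the one above associates a floating IP with the fixed IP it is bound to (192.168.128.10 -> 10.180.180.131), nested under that fixed IP. A trimmed copy of that structure and an illustrative floating_ips helper that walks it:

vif_with_fip = {
    "id": "be020d1c-0688-4a14-b8d7-7d51dc77c7d5",
    "network": {
        "subnets": [{
            "ips": [{
                "address": "192.168.128.10",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.131",
                                  "type": "floating"}],
            }],
        }],
    },
}

def floating_ips(vif_entry: dict):
    """Return (fixed_ip, floating_ip) pairs for a VIF entry."""
    return [(ip["address"], fip["address"])
            for subnet in vif_entry["network"]["subnets"]
            for ip in subnet["ips"]
            for fip in ip.get("floating_ips", [])]

print(floating_ips(vif_with_fip))   # [('192.168.128.10', '10.180.180.131')]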
[ 1800.317725] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.318042] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.318242] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.335813] env[63297]: INFO nova.scheduler.client.report [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleted allocations for instance 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d [ 1800.472263] env[63297]: DEBUG oslo_concurrency.lockutils [None req-718853b0-b6b2-4665-afa3-25e42496eff3 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "927824d3-a98b-47b4-a850-1fb15fd0fbe4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.699s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.528445] env[63297]: DEBUG oslo_concurrency.lockutils [req-b46ea266-c8cf-47df-ae61-acf48edfc46f req-b49930f7-bd8d-4015-bb07-8a3990562887 service nova] Releasing lock "refresh_cache-d4b2da36-b0fd-47d2-95de-ef4b3f91330f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.528737] env[63297]: DEBUG nova.compute.manager [req-b46ea266-c8cf-47df-ae61-acf48edfc46f req-b49930f7-bd8d-4015-bb07-8a3990562887 service nova] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Received event network-vif-deleted-c31a25f5-7d02-427f-932a-464daf59e755 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1800.706666] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d50704f4-4d4e-4cb2-9b5c-ce5f5e9e0036 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.365s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.844067] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f3a0a7d4-8fd3-4269-864d-ec898ae76d0c tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "6a99c537-e882-4c8c-b7c3-0861a5c0dc0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.674s 
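[editor's note] The lockutils records above always pair "acquired ... waited Xs" with "released ... held Ys", which is how lock contention shows up in these logs (e.g. the terminate path holding its instance lock for 5.674s). The context manager below reproduces that reporting with a plain threading.Lock; timed_lock is illustrative, not the oslo.concurrency implementation.

import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name: str):
    """Acquire a named lock and report wait/hold times."""
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - t1:.3f}s')

with timed_lock("compute_resources"):
    time.sleep(0.1)  # critical section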
{{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.337382] env[63297]: DEBUG nova.compute.manager [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Received event network-changed-1952432a-7339-4c5d-80fc-5dac66b659e2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1801.337382] env[63297]: DEBUG nova.compute.manager [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Refreshing instance network info cache due to event network-changed-1952432a-7339-4c5d-80fc-5dac66b659e2. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1801.337806] env[63297]: DEBUG oslo_concurrency.lockutils [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] Acquiring lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.337806] env[63297]: DEBUG oslo_concurrency.lockutils [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] Acquired lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.337806] env[63297]: DEBUG nova.network.neutron [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Refreshing network info cache for port 1952432a-7339-4c5d-80fc-5dac66b659e2 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1801.438973] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.439787] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.440016] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.440220] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.440391] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.442411] env[63297]: INFO nova.compute.manager [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Terminating instance [ 1801.444377] env[63297]: DEBUG nova.compute.manager [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1801.444591] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1801.445449] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2289be68-86c3-4e07-a7e9-39edc2764568 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.454013] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1801.454098] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f31bd44b-f3f3-44ab-a311-c2b6c98be143 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.461432] env[63297]: DEBUG oslo_vmware.api [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1801.461432] env[63297]: value = "task-1698370" [ 1801.461432] env[63297]: _type = "Task" [ 1801.461432] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.470386] env[63297]: DEBUG oslo_vmware.api [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698370, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.973957] env[63297]: DEBUG oslo_vmware.api [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698370, 'name': PowerOffVM_Task, 'duration_secs': 0.210862} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.974248] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1801.974416] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1801.974676] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de2b24e2-1688-42ff-8d14-cfd9a1d8c5a5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.072236] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1802.072476] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1802.072659] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Deleting the datastore file [datastore1] c1696ee9-cb48-414c-b0a0-b6fa2e880a81 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1802.072927] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23f8a283-98f8-4d73-90dc-53b61b248682 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.083067] env[63297]: DEBUG oslo_vmware.api [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for the task: (returnval){ [ 1802.083067] env[63297]: value = "task-1698372" [ 1802.083067] env[63297]: _type = "Task" [ 1802.083067] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.092730] env[63297]: DEBUG oslo_vmware.api [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698372, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.286563] env[63297]: DEBUG nova.network.neutron [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Updated VIF entry in instance network info cache for port 1952432a-7339-4c5d-80fc-5dac66b659e2. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1802.287088] env[63297]: DEBUG nova.network.neutron [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Updating instance_info_cache with network_info: [{"id": "1952432a-7339-4c5d-80fc-5dac66b659e2", "address": "fa:16:3e:80:2c:fe", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1952432a-73", "ovs_interfaceid": "1952432a-7339-4c5d-80fc-5dac66b659e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.376269] env[63297]: DEBUG nova.compute.manager [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Stashing vm_state: active {{(pid=63297) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1802.435893] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "1316bf99-cc93-4d1a-b31c-000dac095b3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.436215] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.593820] env[63297]: DEBUG oslo_vmware.api [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Task: {'id': task-1698372, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164162} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.594625] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1802.594819] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1802.595009] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1802.595195] env[63297]: INFO nova.compute.manager [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1802.595436] env[63297]: DEBUG oslo.service.loopingcall [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1802.595625] env[63297]: DEBUG nova.compute.manager [-] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1802.595718] env[63297]: DEBUG nova.network.neutron [-] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1802.789585] env[63297]: DEBUG oslo_concurrency.lockutils [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] Releasing lock "refresh_cache-fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.789877] env[63297]: DEBUG nova.compute.manager [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Received event network-changed-653160f6-b302-49dd-8655-4703b2ac1c6c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1802.790163] env[63297]: DEBUG nova.compute.manager [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Refreshing instance network info cache due to event network-changed-653160f6-b302-49dd-8655-4703b2ac1c6c. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1802.790480] env[63297]: DEBUG oslo_concurrency.lockutils [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] Acquiring lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.790675] env[63297]: DEBUG oslo_concurrency.lockutils [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] Acquired lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.790849] env[63297]: DEBUG nova.network.neutron [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Refreshing network info cache for port 653160f6-b302-49dd-8655-4703b2ac1c6c {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1802.906526] env[63297]: DEBUG oslo_concurrency.lockutils [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.906526] env[63297]: DEBUG oslo_concurrency.lockutils [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.938902] env[63297]: DEBUG nova.compute.manager [None 
req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1803.320752] env[63297]: DEBUG nova.network.neutron [-] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.363524] env[63297]: DEBUG nova.compute.manager [req-b06a43a6-1ad6-4a56-a230-ad2b4696acd3 req-5ab537c9-2d7f-4a74-bf41-619393379997 service nova] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Received event network-vif-deleted-da1879c7-6003-4193-ab1c-019184bded8d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1803.407587] env[63297]: INFO nova.compute.claims [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1803.458089] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.573655] env[63297]: DEBUG nova.network.neutron [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updated VIF entry in instance network info cache for port 653160f6-b302-49dd-8655-4703b2ac1c6c. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1803.574053] env[63297]: DEBUG nova.network.neutron [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance_info_cache with network_info: [{"id": "653160f6-b302-49dd-8655-4703b2ac1c6c", "address": "fa:16:3e:d7:4e:28", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap653160f6-b3", "ovs_interfaceid": "653160f6-b302-49dd-8655-4703b2ac1c6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.825122] env[63297]: INFO nova.compute.manager [-] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Took 1.23 seconds to deallocate network for instance. 
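
The records above trace the VMware-side teardown of instance c1696ee9-cb48-414c-b0a0-b6fa2e880a81: a PowerOffVM_Task, a synchronous UnregisterVM, then a DeleteDatastoreFile_Task, with each vCenter task polled via wait_for_task until it completes, after which the network is deallocated. The sketch below illustrates that oslo.vmware call pattern in isolation; it is not taken from the log, the endpoint and credentials are placeholders, and vm_ref, dc_ref are assumed to be managed-object references looked up beforehand (Nova itself drives these calls through its vm_util and ds_util helpers).

    # Illustrative sketch only: the terminate sequence shown in the log,
    # expressed as direct oslo.vmware calls. Host, credentials, vm_ref and
    # dc_ref are placeholders assumed to exist.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',       # placeholder endpoint/credentials
        api_retry_count=3, task_poll_interval=0.5)  # poll task state periodically

    def destroy_vm(session, vm_ref, dc_ref, ds_path):
        # Power off the VM; PowerOffVM_Task returns a task reference that is
        # polled until it reports success (the "progress is 0%" lines above).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # UnregisterVM is synchronous: it removes the VM from the vCenter
        # inventory but leaves its files on the datastore.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Delete the instance directory from the datastore, again via a task.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)

    # e.g. destroy_vm(session, vm_ref, dc_ref,
    #                 '[datastore1] c1696ee9-cb48-414c-b0a0-b6fa2e880a81')
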
[ 1803.913494] env[63297]: INFO nova.compute.resource_tracker [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating resource usage from migration 170e9d1f-1d98-44ad-b949-2754f5f3dffa [ 1804.011509] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff490da2-f52f-4ce0-891d-8406a1aa76d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.020040] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e1e19b-a3a4-4a0f-a4ed-f8385cedafed {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.050931] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29bb145b-c8d5-4951-82e3-92a7543d2d46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.059008] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68275747-e7c4-42d7-9bf2-0ad01a0d25ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.072442] env[63297]: DEBUG nova.compute.provider_tree [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1804.078650] env[63297]: DEBUG oslo_concurrency.lockutils [req-2d101297-c169-47d5-8b24-6f204870ce12 req-0c295a81-401a-4051-9cbd-90c1cc6f1e95 service nova] Releasing lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1804.331638] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.576212] env[63297]: DEBUG nova.scheduler.client.report [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1805.080997] env[63297]: DEBUG oslo_concurrency.lockutils [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.178s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.081228] env[63297]: INFO nova.compute.manager [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Migrating [ 1805.088455] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.630s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.089941] env[63297]: INFO nova.compute.claims [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1805.599400] env[63297]: DEBUG oslo_concurrency.lockutils [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.599689] env[63297]: DEBUG oslo_concurrency.lockutils [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.599860] env[63297]: DEBUG nova.network.neutron [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1806.215408] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0670e65e-2afc-4760-9ca9-186e5ac5451f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.225573] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08584515-85e2-4367-875b-84008a5aea53 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.258807] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f6f28c-b47d-4c7e-86a9-3538b65d05b5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.268502] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807c2d2a-509e-4ea7-aed1-9e58f81d6a5c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.281700] env[63297]: DEBUG nova.compute.provider_tree [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 
tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1806.356220] env[63297]: DEBUG nova.network.neutron [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance_info_cache with network_info: [{"id": "653160f6-b302-49dd-8655-4703b2ac1c6c", "address": "fa:16:3e:d7:4e:28", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap653160f6-b3", "ovs_interfaceid": "653160f6-b302-49dd-8655-4703b2ac1c6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.784598] env[63297]: DEBUG nova.scheduler.client.report [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1806.859611] env[63297]: DEBUG oslo_concurrency.lockutils [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.290017] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.290576] env[63297]: DEBUG nova.compute.manager 
[None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1807.293262] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.962s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.293525] env[63297]: DEBUG nova.objects.instance [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lazy-loading 'resources' on Instance uuid c1696ee9-cb48-414c-b0a0-b6fa2e880a81 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1807.796225] env[63297]: DEBUG nova.compute.utils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1807.800335] env[63297]: DEBUG nova.compute.manager [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1807.800484] env[63297]: DEBUG nova.network.neutron [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1807.850754] env[63297]: DEBUG nova.policy [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be5e07baf148496880261386dff8df76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e3dcd98ebe94a75a94322b03feba3b4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1807.906519] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3950159b-9d43-42cb-a9bd-d8bcfa612910 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.915578] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2907490f-03ac-4adf-9abd-cf9b31544656 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.945229] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-4957dc29-7015-4b85-bb89-3c97df501012 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.953439] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31e776e-9322-4041-8ac6-91b6b45255c5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.966786] env[63297]: DEBUG nova.compute.provider_tree [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1808.163711] env[63297]: DEBUG nova.network.neutron [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Successfully created port: 22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1808.304025] env[63297]: DEBUG nova.compute.manager [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1808.376966] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6261437f-d9c7-4747-8a67-8fc4746d18dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.397867] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance 'b6a1f66d-783e-4263-b9c4-a4d517ce2923' progress to 0 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1808.470919] env[63297]: DEBUG nova.scheduler.client.report [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1808.903774] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1808.904151] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-47fd4682-f657-4d4f-9651-7ae503612b32 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.912124] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1808.912124] env[63297]: value = "task-1698373" [ 1808.912124] env[63297]: _type = "Task" [ 1808.912124] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.921083] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698373, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.974772] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.681s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.998514] env[63297]: INFO nova.scheduler.client.report [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Deleted allocations for instance c1696ee9-cb48-414c-b0a0-b6fa2e880a81 [ 1809.312026] env[63297]: DEBUG nova.compute.manager [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1809.344213] env[63297]: DEBUG nova.virt.hardware [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1809.344537] env[63297]: DEBUG nova.virt.hardware [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1809.344699] env[63297]: DEBUG nova.virt.hardware [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1809.344879] env[63297]: DEBUG nova.virt.hardware [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1809.345036] env[63297]: DEBUG nova.virt.hardware [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1809.345189] env[63297]: DEBUG nova.virt.hardware [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1809.345392] env[63297]: DEBUG nova.virt.hardware [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1809.345545] env[63297]: DEBUG nova.virt.hardware [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1809.345715] env[63297]: DEBUG 
nova.virt.hardware [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1809.345875] env[63297]: DEBUG nova.virt.hardware [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1809.346060] env[63297]: DEBUG nova.virt.hardware [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1809.346896] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5973c073-d536-42fe-91f7-422d2f04de8b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.355728] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b763fd6-0326-4fba-afd6-69011bdfc371 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.422314] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698373, 'name': PowerOffVM_Task, 'duration_secs': 0.195907} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.422661] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1809.422917] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance 'b6a1f66d-783e-4263-b9c4-a4d517ce2923' progress to 17 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1809.506303] env[63297]: DEBUG oslo_concurrency.lockutils [None req-06ca42af-d1cd-4245-92f8-cb2bd0e259c6 tempest-AttachInterfacesTestJSON-760324721 tempest-AttachInterfacesTestJSON-760324721-project-member] Lock "c1696ee9-cb48-414c-b0a0-b6fa2e880a81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.066s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1809.589573] env[63297]: DEBUG nova.compute.manager [req-7931a434-b800-412b-a497-38a396c0815a req-f6f03d98-f934-4db3-9c2e-d8071bd68b60 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Received event network-vif-plugged-22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1809.589781] env[63297]: DEBUG oslo_concurrency.lockutils [req-7931a434-b800-412b-a497-38a396c0815a req-f6f03d98-f934-4db3-9c2e-d8071bd68b60 service nova] Acquiring lock "1316bf99-cc93-4d1a-b31c-000dac095b3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.589990] env[63297]: DEBUG oslo_concurrency.lockutils [req-7931a434-b800-412b-a497-38a396c0815a req-f6f03d98-f934-4db3-9c2e-d8071bd68b60 service nova] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1809.590533] env[63297]: DEBUG oslo_concurrency.lockutils [req-7931a434-b800-412b-a497-38a396c0815a req-f6f03d98-f934-4db3-9c2e-d8071bd68b60 service nova] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1809.590713] env[63297]: DEBUG nova.compute.manager [req-7931a434-b800-412b-a497-38a396c0815a req-f6f03d98-f934-4db3-9c2e-d8071bd68b60 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] No waiting events found dispatching network-vif-plugged-22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1809.590882] env[63297]: WARNING nova.compute.manager [req-7931a434-b800-412b-a497-38a396c0815a req-f6f03d98-f934-4db3-9c2e-d8071bd68b60 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Received unexpected event 
network-vif-plugged-22445506-b44b-4648-8c7b-164ab284edb9 for instance with vm_state building and task_state spawning. [ 1809.676446] env[63297]: DEBUG nova.network.neutron [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Successfully updated port: 22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1809.929737] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1809.930083] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1809.930187] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1809.930399] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1809.930569] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1809.930725] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1809.930925] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1809.931099] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 
tempest-ServerActionsTestOtherA-1231642848-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1809.931273] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1809.931436] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1809.931610] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1809.936866] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b23620ae-8d4c-41ab-9ee9-e0951fe20310 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.955364] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1809.955364] env[63297]: value = "task-1698374" [ 1809.955364] env[63297]: _type = "Task" [ 1809.955364] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.963630] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698374, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.180390] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.180607] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.180712] env[63297]: DEBUG nova.network.neutron [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1810.466162] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698374, 'name': ReconfigVM_Task, 'duration_secs': 0.168311} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.466489] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance 'b6a1f66d-783e-4263-b9c4-a4d517ce2923' progress to 33 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1810.714682] env[63297]: DEBUG nova.network.neutron [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1810.841445] env[63297]: DEBUG nova.network.neutron [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updating instance_info_cache with network_info: [{"id": "22445506-b44b-4648-8c7b-164ab284edb9", "address": "fa:16:3e:f3:77:fe", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22445506-b4", "ovs_interfaceid": "22445506-b44b-4648-8c7b-164ab284edb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.973467] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1810.973916] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1810.973993] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1810.974211] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1810.974408] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 
tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1810.974596] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1810.974843] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1810.975557] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1810.979297] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1810.979489] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1810.979675] env[63297]: DEBUG nova.virt.hardware [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1810.986611] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1810.986977] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6912063-577e-4b95-a1ac-c57e98a6c280 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.011103] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1811.011103] env[63297]: value = "task-1698375" [ 1811.011103] env[63297]: _type = "Task" [ 1811.011103] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.020389] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698375, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.344690] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1811.344875] env[63297]: DEBUG nova.compute.manager [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Instance network_info: |[{"id": "22445506-b44b-4648-8c7b-164ab284edb9", "address": "fa:16:3e:f3:77:fe", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22445506-b4", "ovs_interfaceid": "22445506-b44b-4648-8c7b-164ab284edb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1811.345760] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:77:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e55c248-c504-4c7a-bbe9-f42cf417aee7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22445506-b44b-4648-8c7b-164ab284edb9', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1811.356237] env[63297]: DEBUG oslo.service.loopingcall [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1811.357478] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1811.357774] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a7af006-83ba-4e95-9e47-c4c94d911df2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.380136] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1811.380136] env[63297]: value = "task-1698376" [ 1811.380136] env[63297]: _type = "Task" [ 1811.380136] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.389597] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698376, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.521771] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698375, 'name': ReconfigVM_Task, 'duration_secs': 0.20327} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.522056] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1811.522831] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd8c65a-fe2b-4463-8cfe-1432d5ae88ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.545890] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] volume-a946ca63-9887-4bf6-9a42-97242baeac81/volume-a946ca63-9887-4bf6-9a42-97242baeac81.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1811.546182] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-369d721d-0ef4-44d8-be7a-785eb6d0300a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.565403] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1811.565403] env[63297]: value = "task-1698377" [ 1811.565403] env[63297]: _type = "Task" [ 1811.565403] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.574974] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698377, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.621989] env[63297]: DEBUG nova.compute.manager [req-74b2a9fc-783c-491e-8373-5174e2f96948 req-a77e0d24-ba72-46ff-ab31-e7810f9cc5c7 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Received event network-changed-22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1811.622235] env[63297]: DEBUG nova.compute.manager [req-74b2a9fc-783c-491e-8373-5174e2f96948 req-a77e0d24-ba72-46ff-ab31-e7810f9cc5c7 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Refreshing instance network info cache due to event network-changed-22445506-b44b-4648-8c7b-164ab284edb9. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1811.622570] env[63297]: DEBUG oslo_concurrency.lockutils [req-74b2a9fc-783c-491e-8373-5174e2f96948 req-a77e0d24-ba72-46ff-ab31-e7810f9cc5c7 service nova] Acquiring lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.622785] env[63297]: DEBUG oslo_concurrency.lockutils [req-74b2a9fc-783c-491e-8373-5174e2f96948 req-a77e0d24-ba72-46ff-ab31-e7810f9cc5c7 service nova] Acquired lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.622984] env[63297]: DEBUG nova.network.neutron [req-74b2a9fc-783c-491e-8373-5174e2f96948 req-a77e0d24-ba72-46ff-ab31-e7810f9cc5c7 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Refreshing network info cache for port 22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1811.892123] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698376, 'name': CreateVM_Task, 'duration_secs': 0.491927} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.892412] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1811.893264] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.893850] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.894147] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1811.894427] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f1728db-5d62-49fe-8568-448500886bf8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.903020] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1811.903020] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5223dcd9-2218-5960-0fc4-2f8886637ba0" [ 1811.903020] env[63297]: _type = "Task" [ 1811.903020] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.914509] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5223dcd9-2218-5960-0fc4-2f8886637ba0, 'name': SearchDatastore_Task, 'duration_secs': 0.012013} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.916054] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1811.916054] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1811.916054] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.916054] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.916054] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1811.916054] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91225040-19aa-493f-8d13-3590536a12ca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.925345] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1811.926137] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1811.926321] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dba45804-5aed-48ee-a52b-271166840e66 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.932094] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1811.932094] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d18ef7-4370-130d-db56-95313fef2aef" [ 1811.932094] env[63297]: _type = "Task" [ 1811.932094] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.940386] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d18ef7-4370-130d-db56-95313fef2aef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.076211] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698377, 'name': ReconfigVM_Task, 'duration_secs': 0.291852} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.076509] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Reconfigured VM instance instance-00000073 to attach disk [datastore1] volume-a946ca63-9887-4bf6-9a42-97242baeac81/volume-a946ca63-9887-4bf6-9a42-97242baeac81.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1812.076749] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance 'b6a1f66d-783e-4263-b9c4-a4d517ce2923' progress to 50 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1812.336445] env[63297]: DEBUG nova.network.neutron [req-74b2a9fc-783c-491e-8373-5174e2f96948 req-a77e0d24-ba72-46ff-ab31-e7810f9cc5c7 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updated VIF entry in instance network info cache for port 22445506-b44b-4648-8c7b-164ab284edb9. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1812.336795] env[63297]: DEBUG nova.network.neutron [req-74b2a9fc-783c-491e-8373-5174e2f96948 req-a77e0d24-ba72-46ff-ab31-e7810f9cc5c7 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updating instance_info_cache with network_info: [{"id": "22445506-b44b-4648-8c7b-164ab284edb9", "address": "fa:16:3e:f3:77:fe", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22445506-b4", "ovs_interfaceid": "22445506-b44b-4648-8c7b-164ab284edb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.443373] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d18ef7-4370-130d-db56-95313fef2aef, 'name': SearchDatastore_Task, 'duration_secs': 0.009148} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.444246] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd0a2c89-ce8f-4391-b4b5-b17f33d376dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.450704] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1812.450704] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5228f7bd-a612-1221-085d-7efb8e7f06dc" [ 1812.450704] env[63297]: _type = "Task" [ 1812.450704] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.458710] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5228f7bd-a612-1221-085d-7efb8e7f06dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.584024] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ee6f61-22c6-4f77-a910-5443f9c81a1f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.605253] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c90679-4576-4de6-95bf-ae5ef2c644e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.627257] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance 'b6a1f66d-783e-4263-b9c4-a4d517ce2923' progress to 67 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1812.839878] env[63297]: DEBUG oslo_concurrency.lockutils [req-74b2a9fc-783c-491e-8373-5174e2f96948 req-a77e0d24-ba72-46ff-ab31-e7810f9cc5c7 service nova] Releasing lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.962245] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5228f7bd-a612-1221-085d-7efb8e7f06dc, 'name': SearchDatastore_Task, 'duration_secs': 0.0106} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.962520] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.962785] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 1316bf99-cc93-4d1a-b31c-000dac095b3e/1316bf99-cc93-4d1a-b31c-000dac095b3e.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1812.963063] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02418add-29ef-4ae2-be56-6349b5db0090 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.972677] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1812.972677] env[63297]: value = "task-1698378" [ 1812.972677] env[63297]: _type = "Task" [ 1812.972677] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.982200] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698378, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.001816] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.002161] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.002259] env[63297]: INFO nova.compute.manager [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Shelving [ 1813.484043] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698378, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.511281] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1813.511637] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30825580-a4a9-4057-8ef3-f4640aef8fa1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.522179] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1813.522179] env[63297]: value = "task-1698379" [ 1813.522179] env[63297]: _type = "Task" [ 1813.522179] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.533191] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698379, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.983376] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698378, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647982} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.983645] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 1316bf99-cc93-4d1a-b31c-000dac095b3e/1316bf99-cc93-4d1a-b31c-000dac095b3e.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1813.983931] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1813.984123] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-431a5191-92d7-4737-a811-6baac01a7692 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.992232] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1813.992232] env[63297]: value = "task-1698380" [ 1813.992232] env[63297]: _type = "Task" [ 1813.992232] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.002016] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698380, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.033519] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698379, 'name': PowerOffVM_Task, 'duration_secs': 0.212669} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.033861] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1814.034673] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef20410f-e829-4952-8e8d-5951c699a2b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.053872] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ba0358-4ecc-4f3a-a34f-6de13397f43c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.277431] env[63297]: DEBUG nova.network.neutron [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Port 653160f6-b302-49dd-8655-4703b2ac1c6c binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1814.505398] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698380, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070835} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.505690] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1814.506521] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ae1c46-64e0-4f40-8260-a4dd13002efb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.532303] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 1316bf99-cc93-4d1a-b31c-000dac095b3e/1316bf99-cc93-4d1a-b31c-000dac095b3e.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1814.532548] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b4752f6-0c73-4103-9bde-c1c12a3efd46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.554281] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1814.554281] env[63297]: value = 
"task-1698381" [ 1814.554281] env[63297]: _type = "Task" [ 1814.554281] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.563293] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698381, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.565404] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1814.565685] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-33a07e47-c920-4b23-a89d-24b9d0db5d68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.573909] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1814.573909] env[63297]: value = "task-1698382" [ 1814.573909] env[63297]: _type = "Task" [ 1814.573909] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.582513] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698382, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.065479] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698381, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.084663] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698382, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.295407] env[63297]: DEBUG oslo_concurrency.lockutils [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.295651] env[63297]: DEBUG oslo_concurrency.lockutils [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.295824] env[63297]: DEBUG oslo_concurrency.lockutils [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.565837] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698381, 'name': ReconfigVM_Task, 'duration_secs': 0.890777} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.566194] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 1316bf99-cc93-4d1a-b31c-000dac095b3e/1316bf99-cc93-4d1a-b31c-000dac095b3e.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1815.566901] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2273984-5672-4f15-8cbb-b12e9450b4d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.574872] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1815.574872] env[63297]: value = "task-1698383" [ 1815.574872] env[63297]: _type = "Task" [ 1815.574872] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.592638] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698382, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.592890] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698383, 'name': Rename_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.089089] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698382, 'name': CreateSnapshot_Task, 'duration_secs': 1.037024} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.092027] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1816.092311] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698383, 'name': Rename_Task, 'duration_secs': 0.167884} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.093011] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091a196b-bab2-466d-afc0-c0088b8765a4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.095412] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1816.095637] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c503201-a0db-40e9-8a5b-a97b5026c0fb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.108708] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1816.108708] env[63297]: value = "task-1698384" [ 1816.108708] env[63297]: _type = "Task" [ 1816.108708] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.119100] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698384, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.189497] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquiring lock "41d4118d-7621-4ac9-be2f-2664cd691180" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.189728] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lock "41d4118d-7621-4ac9-be2f-2664cd691180" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.341846] env[63297]: DEBUG oslo_concurrency.lockutils [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1816.342231] env[63297]: DEBUG oslo_concurrency.lockutils [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.342536] env[63297]: DEBUG nova.network.neutron [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1816.616920] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1816.617388] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6b03df6d-160d-49c9-bd03-47f3eaf2bf4c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.630129] env[63297]: DEBUG oslo_vmware.api [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698384, 'name': PowerOnVM_Task, 'duration_secs': 0.454645} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.631553] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1816.631785] env[63297]: INFO nova.compute.manager [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Took 7.32 seconds to spawn the instance on the hypervisor. [ 1816.631965] env[63297]: DEBUG nova.compute.manager [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1816.632415] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1816.632415] env[63297]: value = "task-1698385" [ 1816.632415] env[63297]: _type = "Task" [ 1816.632415] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.633105] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec41514-a857-4cb5-9d63-f38fe11e7f16 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.649466] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698385, 'name': CloneVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.693109] env[63297]: DEBUG nova.compute.manager [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1817.091651] env[63297]: DEBUG nova.network.neutron [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance_info_cache with network_info: [{"id": "653160f6-b302-49dd-8655-4703b2ac1c6c", "address": "fa:16:3e:d7:4e:28", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap653160f6-b3", "ovs_interfaceid": "653160f6-b302-49dd-8655-4703b2ac1c6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.155981] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698385, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.161628] env[63297]: INFO nova.compute.manager [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Took 13.72 seconds to build instance. 
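The records above repeat one pattern for every vCenter operation in this spawn (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task): the driver invokes a *_Task method, gets a Task managed-object reference back immediately, then polls it to completion, which is what produces the paired "Invoking <Method> with opID=...", "Waiting for the task", "Task ... progress is N%" and "completed successfully" lines. A minimal sketch of that invoke-and-poll pattern, assuming an already-established oslo_vmware VMwareAPISession named `session` and a VM moref `vm_ref` (the helper name `reconfigure_vm` is illustrative, not the exact Nova function):

    def reconfigure_vm(session, vm_ref, config_spec):
        # invoke_api() issues VirtualMachine.ReconfigVM_Task through the vim
        # client; vCenter returns a Task moref right away.  This is the point
        # where oslo_vmware.service logs "Invoking VirtualMachine.ReconfigVM_Task
        # with opID=oslo.vmware-...".
        task = session.invoke_api(session.vim, "ReconfigVM_Task",
                                  vm_ref, spec=config_spec)
        # wait_for_task() polls the task object, logging "Task: {'id': ...,
        # 'name': ReconfigVM_Task} progress is N%." on each poll, and returns
        # (with 'duration_secs' in the final record) once the task succeeds,
        # or raises if vCenter reports an error.
        return session.wait_for_task(task)

The nova vmwareapi driver wraps these two calls behind thin _call_method()/_wait_for_task() helpers on its session object, but the polling behaviour visible in the log is the same.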
[ 1817.214913] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.215327] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.216872] env[63297]: INFO nova.compute.claims [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1817.594972] env[63297]: DEBUG oslo_concurrency.lockutils [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.651413] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698385, 'name': CloneVM_Task} progress is 95%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.664825] env[63297]: DEBUG oslo_concurrency.lockutils [None req-266b4e67-608d-43db-bce6-4cd5b9fc0989 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.228s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.945793] env[63297]: DEBUG nova.compute.manager [req-78efa93d-fd53-4c96-97e0-2a68f3b925ae req-1995be30-2fb2-47e5-a29d-ac9263aa58d5 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Received event network-changed-22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1817.945990] env[63297]: DEBUG nova.compute.manager [req-78efa93d-fd53-4c96-97e0-2a68f3b925ae req-1995be30-2fb2-47e5-a29d-ac9263aa58d5 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Refreshing instance network info cache due to event network-changed-22445506-b44b-4648-8c7b-164ab284edb9. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1817.946262] env[63297]: DEBUG oslo_concurrency.lockutils [req-78efa93d-fd53-4c96-97e0-2a68f3b925ae req-1995be30-2fb2-47e5-a29d-ac9263aa58d5 service nova] Acquiring lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.946410] env[63297]: DEBUG oslo_concurrency.lockutils [req-78efa93d-fd53-4c96-97e0-2a68f3b925ae req-1995be30-2fb2-47e5-a29d-ac9263aa58d5 service nova] Acquired lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.946573] env[63297]: DEBUG nova.network.neutron [req-78efa93d-fd53-4c96-97e0-2a68f3b925ae req-1995be30-2fb2-47e5-a29d-ac9263aa58d5 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Refreshing network info cache for port 22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1818.104809] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd58d75e-51e8-4749-bd25-355a407e68f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.113319] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c20501-5641-42be-88dc-20e7d2c392ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.149350] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698385, 'name': CloneVM_Task, 'duration_secs': 1.253627} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.149576] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Created linked-clone VM from snapshot [ 1818.150377] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0c821d-08d4-4df9-aee9-177f5a3cf57b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.158376] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Uploading image 2aadb7a8-891e-4a45-b2ae-792004e8b06b {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1818.186808] env[63297]: DEBUG oslo_vmware.rw_handles [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1818.186808] env[63297]: value = "vm-354030" [ 1818.186808] env[63297]: _type = "VirtualMachine" [ 1818.186808] env[63297]: }. 
{{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1818.187149] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8bc73685-e278-4af0-b642-177fdb7f285e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.195581] env[63297]: DEBUG oslo_vmware.rw_handles [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lease: (returnval){ [ 1818.195581] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5238053d-1695-40a1-4599-d9c77422ef16" [ 1818.195581] env[63297]: _type = "HttpNfcLease" [ 1818.195581] env[63297]: } obtained for exporting VM: (result){ [ 1818.195581] env[63297]: value = "vm-354030" [ 1818.195581] env[63297]: _type = "VirtualMachine" [ 1818.195581] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1818.195581] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the lease: (returnval){ [ 1818.195581] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5238053d-1695-40a1-4599-d9c77422ef16" [ 1818.195581] env[63297]: _type = "HttpNfcLease" [ 1818.195581] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1818.203915] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1818.203915] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5238053d-1695-40a1-4599-d9c77422ef16" [ 1818.203915] env[63297]: _type = "HttpNfcLease" [ 1818.203915] env[63297]: } is initializing. 
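The HttpNfcLease obtained above starts out "initializing" and is polled until it reports ready; only then is the lease info read for the VMDK URL. The real loop is oslo.vmware's wait_for_lease_ready (the api.py:462/490 frames above); the sketch below only illustrates that poll-until-ready shape, with get_lease_state() as a hypothetical stand-in for reading HttpNfcLease.state through the vSphere API:

    import time

    class LeaseError(Exception):
        pass

    def poll_lease_until_ready(lease_ref, get_lease_state, interval=0.5, timeout=60):
        # get_lease_state is a hypothetical callable returning one of
        # 'initializing', 'ready' or 'error' for the given lease reference.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state = get_lease_state(lease_ref)
            if state == "ready":
                return
            if state == "error":
                raise LeaseError("lease %s entered error state" % lease_ref)
            time.sleep(interval)  # 'initializing' (as logged above) is retried
        raise LeaseError("lease %s not ready after %s seconds" % (lease_ref, timeout))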
{{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1818.357488] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52bb3045-c9fb-4e17-bfd3-1e107d041fa0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.366079] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d445d64-563d-4ec7-80b1-15c497c234f0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.396794] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f31edb3-5fcb-4c0e-b457-febb93b3f5c4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.404263] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0722508a-d168-4f77-b8b0-7bf8500a16e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.417753] env[63297]: DEBUG nova.compute.provider_tree [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1818.652084] env[63297]: DEBUG nova.network.neutron [req-78efa93d-fd53-4c96-97e0-2a68f3b925ae req-1995be30-2fb2-47e5-a29d-ac9263aa58d5 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updated VIF entry in instance network info cache for port 22445506-b44b-4648-8c7b-164ab284edb9. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1818.652646] env[63297]: DEBUG nova.network.neutron [req-78efa93d-fd53-4c96-97e0-2a68f3b925ae req-1995be30-2fb2-47e5-a29d-ac9263aa58d5 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updating instance_info_cache with network_info: [{"id": "22445506-b44b-4648-8c7b-164ab284edb9", "address": "fa:16:3e:f3:77:fe", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22445506-b4", "ovs_interfaceid": "22445506-b44b-4648-8c7b-164ab284edb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.707150] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1818.707150] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5238053d-1695-40a1-4599-d9c77422ef16" [ 1818.707150] env[63297]: _type = "HttpNfcLease" [ 1818.707150] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1818.707470] env[63297]: DEBUG oslo_vmware.rw_handles [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1818.707470] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5238053d-1695-40a1-4599-d9c77422ef16" [ 1818.707470] env[63297]: _type = "HttpNfcLease" [ 1818.707470] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1818.708230] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1abe70-e419-45c5-8d84-a35524ea2581 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.717577] env[63297]: DEBUG oslo_vmware.rw_handles [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52310e8e-f528-793a-8dd9-c8d063a52865/disk-0.vmdk from lease info. 
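Once the lease is ready, the handler resolves a VMDK URL on the ESX host and opens it for reading, with the later HttpNfcLeaseProgress call keeping the lease alive while the disk is streamed. In simplified terms that read is a streamed HTTPS GET. The sketch below is an illustration only, not oslo_vmware.rw_handles: lease authentication, progress keep-alives and certificate handling are omitted, and the URL is simply the one reported in the log:

    import requests

    VMDK_URL = ("https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/"
                "52310e8e-f528-793a-8dd9-c8d063a52865/disk-0.vmdk")

    def stream_vmdk(url, chunk_size=64 * 1024, verify=True):
        # Stream the exported disk in chunks rather than loading it into memory.
        with requests.get(url, stream=True, verify=verify, timeout=60) as resp:
            resp.raise_for_status()
            for chunk in resp.iter_content(chunk_size=chunk_size):
                yield chunk

    # total = sum(len(c) for c in stream_vmdk(VMDK_URL))  # needs lease auth in practice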
{{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1818.717767] env[63297]: DEBUG oslo_vmware.rw_handles [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52310e8e-f528-793a-8dd9-c8d063a52865/disk-0.vmdk for reading. {{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1818.867440] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-76d19c27-4785-4bc9-ab57-d06ab14cc859 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.920758] env[63297]: DEBUG nova.scheduler.client.report [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1819.156186] env[63297]: DEBUG oslo_concurrency.lockutils [req-78efa93d-fd53-4c96-97e0-2a68f3b925ae req-1995be30-2fb2-47e5-a29d-ac9263aa58d5 service nova] Releasing lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.218095] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16535dea-8b04-454e-8a2c-22b6f36c370b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.240294] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3dde5f6-4314-4e79-8906-08eb604fc515 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.249139] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance 'b6a1f66d-783e-4263-b9c4-a4d517ce2923' progress to 83 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1819.427042] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.212s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.427740] env[63297]: DEBUG nova.compute.manager [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 
tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1819.755517] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1819.755850] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7d74e18-1116-4701-a1b7-705d147413fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.764823] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1819.764823] env[63297]: value = "task-1698387" [ 1819.764823] env[63297]: _type = "Task" [ 1819.764823] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.776903] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698387, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.933779] env[63297]: DEBUG nova.compute.utils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1819.935611] env[63297]: DEBUG nova.compute.manager [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1819.935913] env[63297]: DEBUG nova.network.neutron [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1819.977060] env[63297]: DEBUG nova.policy [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f356beaa7126453fab5125781984ab3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8591e3bee13e474b88f592525d95a2e5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1820.261964] env[63297]: DEBUG nova.network.neutron [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Successfully created port: 199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1820.277046] env[63297]: DEBUG oslo_vmware.api [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698387, 'name': PowerOnVM_Task, 'duration_secs': 0.439786} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.277492] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1820.277745] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-122f58e6-5bba-4160-993c-09ee4107d821 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance 'b6a1f66d-783e-4263-b9c4-a4d517ce2923' progress to 100 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1820.439955] env[63297]: DEBUG nova.compute.manager [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1821.450073] env[63297]: DEBUG nova.compute.manager [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Start spawning the instance on the hypervisor. 
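The power-on just above follows the usual oslo.vmware invoke-then-wait shape: PowerOnVM_Task is invoked through the session and the returned task is polled until completion (progress 0%, finished in about 0.44s here). A minimal sketch of that shape, assuming an already-created oslo_vmware.api.VMwareAPISession and a known VirtualMachine managed object reference (neither is constructed here):

    def power_on(session, vm_ref):
        # Invoke the vSphere task through the session's SOAP client...
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # ...then block until it completes; wait_for_task() is what produces the
        # "Task: {...} progress is N%" polling lines in this log and raises if
        # the task ends in error.
        return session.wait_for_task(task)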
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1821.479735] env[63297]: DEBUG nova.virt.hardware [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1821.479989] env[63297]: DEBUG nova.virt.hardware [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1821.480165] env[63297]: DEBUG nova.virt.hardware [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1821.480351] env[63297]: DEBUG nova.virt.hardware [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1821.480495] env[63297]: DEBUG nova.virt.hardware [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1821.480642] env[63297]: DEBUG nova.virt.hardware [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1821.480852] env[63297]: DEBUG nova.virt.hardware [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1821.481017] env[63297]: DEBUG nova.virt.hardware [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1821.481183] env[63297]: DEBUG nova.virt.hardware [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1821.481344] env[63297]: DEBUG nova.virt.hardware [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1821.481525] env[63297]: DEBUG nova.virt.hardware [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1821.482440] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a860ce80-0c80-4cce-bd56-a8e12d6b79ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.491466] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281e9aa0-6d19-4e83-9d69-4b2aa73f462f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.765600] env[63297]: DEBUG nova.compute.manager [req-d608ba9f-0a68-4447-98c1-7d19f03fdc97 req-b100f0e6-7040-4000-aac4-14f28d32d551 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Received event network-vif-plugged-199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1821.765838] env[63297]: DEBUG oslo_concurrency.lockutils [req-d608ba9f-0a68-4447-98c1-7d19f03fdc97 req-b100f0e6-7040-4000-aac4-14f28d32d551 service nova] Acquiring lock "41d4118d-7621-4ac9-be2f-2664cd691180-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.766071] env[63297]: DEBUG oslo_concurrency.lockutils [req-d608ba9f-0a68-4447-98c1-7d19f03fdc97 req-b100f0e6-7040-4000-aac4-14f28d32d551 service nova] Lock "41d4118d-7621-4ac9-be2f-2664cd691180-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.766247] env[63297]: DEBUG oslo_concurrency.lockutils [req-d608ba9f-0a68-4447-98c1-7d19f03fdc97 req-b100f0e6-7040-4000-aac4-14f28d32d551 service nova] Lock "41d4118d-7621-4ac9-be2f-2664cd691180-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.766619] env[63297]: DEBUG nova.compute.manager [req-d608ba9f-0a68-4447-98c1-7d19f03fdc97 req-b100f0e6-7040-4000-aac4-14f28d32d551 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] No waiting events found dispatching 
network-vif-plugged-199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1821.766619] env[63297]: WARNING nova.compute.manager [req-d608ba9f-0a68-4447-98c1-7d19f03fdc97 req-b100f0e6-7040-4000-aac4-14f28d32d551 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Received unexpected event network-vif-plugged-199fed1c-c158-4c42-85e0-83b3e9035230 for instance with vm_state building and task_state spawning. [ 1821.843871] env[63297]: DEBUG nova.network.neutron [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Successfully updated port: 199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1822.328357] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.328765] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.329088] env[63297]: DEBUG nova.compute.manager [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Going to confirm migration 8 {{(pid=63297) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1822.346868] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquiring lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.347171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquired lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.347260] env[63297]: DEBUG nova.network.neutron [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1822.865173] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock 
"refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.865451] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquired lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.865544] env[63297]: DEBUG nova.network.neutron [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1822.865744] env[63297]: DEBUG nova.objects.instance [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lazy-loading 'info_cache' on Instance uuid b6a1f66d-783e-4263-b9c4-a4d517ce2923 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1822.878835] env[63297]: DEBUG nova.network.neutron [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1823.067571] env[63297]: DEBUG nova.network.neutron [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updating instance_info_cache with network_info: [{"id": "199fed1c-c158-4c42-85e0-83b3e9035230", "address": "fa:16:3e:22:93:2a", "network": {"id": "88ab7e6e-b26f-40e8-a725-1d0f995a7758", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-216828050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8591e3bee13e474b88f592525d95a2e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199fed1c-c1", "ovs_interfaceid": "199fed1c-c158-4c42-85e0-83b3e9035230", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.570278] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Releasing lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" 
{{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.570644] env[63297]: DEBUG nova.compute.manager [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Instance network_info: |[{"id": "199fed1c-c158-4c42-85e0-83b3e9035230", "address": "fa:16:3e:22:93:2a", "network": {"id": "88ab7e6e-b26f-40e8-a725-1d0f995a7758", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-216828050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8591e3bee13e474b88f592525d95a2e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199fed1c-c1", "ovs_interfaceid": "199fed1c-c158-4c42-85e0-83b3e9035230", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1823.571121] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:93:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b5f9472-1844-4c99-8804-8f193cfff562', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '199fed1c-c158-4c42-85e0-83b3e9035230', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1823.578929] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Creating folder: Project (8591e3bee13e474b88f592525d95a2e5). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1823.579251] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4355ae9b-03c7-4d58-bd7c-5f69700e4f66 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.593677] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Created folder: Project (8591e3bee13e474b88f592525d95a2e5) in parent group-v353718. [ 1823.593964] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Creating folder: Instances. 
Parent ref: group-v354031. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1823.594210] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1cf703a8-0ecb-4c63-a2fc-83a6f64cfb49 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.606017] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Created folder: Instances in parent group-v354031. [ 1823.606273] env[63297]: DEBUG oslo.service.loopingcall [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1823.606499] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1823.606709] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2435ea25-3186-4410-a9e8-474ca17db3a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.626963] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1823.626963] env[63297]: value = "task-1698390" [ 1823.626963] env[63297]: _type = "Task" [ 1823.626963] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.635689] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698390, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.792452] env[63297]: DEBUG nova.compute.manager [req-9ed0db61-ce27-4d2b-a51c-fa87ca97f4bd req-5681dfe2-ac53-4dd8-8eee-018b6e7a3966 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Received event network-changed-199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1823.792674] env[63297]: DEBUG nova.compute.manager [req-9ed0db61-ce27-4d2b-a51c-fa87ca97f4bd req-5681dfe2-ac53-4dd8-8eee-018b6e7a3966 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Refreshing instance network info cache due to event network-changed-199fed1c-c158-4c42-85e0-83b3e9035230. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1823.792869] env[63297]: DEBUG oslo_concurrency.lockutils [req-9ed0db61-ce27-4d2b-a51c-fa87ca97f4bd req-5681dfe2-ac53-4dd8-8eee-018b6e7a3966 service nova] Acquiring lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.793035] env[63297]: DEBUG oslo_concurrency.lockutils [req-9ed0db61-ce27-4d2b-a51c-fa87ca97f4bd req-5681dfe2-ac53-4dd8-8eee-018b6e7a3966 service nova] Acquired lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.793204] env[63297]: DEBUG nova.network.neutron [req-9ed0db61-ce27-4d2b-a51c-fa87ca97f4bd req-5681dfe2-ac53-4dd8-8eee-018b6e7a3966 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Refreshing network info cache for port 199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1824.127513] env[63297]: DEBUG nova.network.neutron [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance_info_cache with network_info: [{"id": "653160f6-b302-49dd-8655-4703b2ac1c6c", "address": "fa:16:3e:d7:4e:28", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap653160f6-b3", "ovs_interfaceid": "653160f6-b302-49dd-8655-4703b2ac1c6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.139291] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698390, 'name': CreateVM_Task, 'duration_secs': 0.386846} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.140082] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1824.140799] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.140976] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.141331] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1824.141852] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20ddfdbd-02e1-4ad9-b1e2-e46b2a8aa861 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.147602] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for the task: (returnval){ [ 1824.147602] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522ab1c5-94a1-e59d-91a3-56ac73dfea51" [ 1824.147602] env[63297]: _type = "Task" [ 1824.147602] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.156939] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522ab1c5-94a1-e59d-91a3-56ac73dfea51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.500595] env[63297]: DEBUG nova.network.neutron [req-9ed0db61-ce27-4d2b-a51c-fa87ca97f4bd req-5681dfe2-ac53-4dd8-8eee-018b6e7a3966 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updated VIF entry in instance network info cache for port 199fed1c-c158-4c42-85e0-83b3e9035230. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1824.501059] env[63297]: DEBUG nova.network.neutron [req-9ed0db61-ce27-4d2b-a51c-fa87ca97f4bd req-5681dfe2-ac53-4dd8-8eee-018b6e7a3966 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updating instance_info_cache with network_info: [{"id": "199fed1c-c158-4c42-85e0-83b3e9035230", "address": "fa:16:3e:22:93:2a", "network": {"id": "88ab7e6e-b26f-40e8-a725-1d0f995a7758", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-216828050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8591e3bee13e474b88f592525d95a2e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199fed1c-c1", "ovs_interfaceid": "199fed1c-c158-4c42-85e0-83b3e9035230", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.633524] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Releasing lock "refresh_cache-b6a1f66d-783e-4263-b9c4-a4d517ce2923" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.633766] env[63297]: DEBUG nova.objects.instance [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lazy-loading 'migration_context' on Instance uuid b6a1f66d-783e-4263-b9c4-a4d517ce2923 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1824.661233] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522ab1c5-94a1-e59d-91a3-56ac73dfea51, 'name': SearchDatastore_Task, 'duration_secs': 0.01159} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.661544] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.661797] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1824.662043] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.662213] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.662400] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1824.662948] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2310887-49fd-4503-9e77-1c4019e27a5f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.672364] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1824.672555] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1824.673335] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64b054e5-fa10-4096-9353-7ebfa4a2e6d4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.680033] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for the task: (returnval){ [ 1824.680033] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522c38bc-297b-8bab-187d-13c3eb1283b9" [ 1824.680033] env[63297]: _type = "Task" [ 1824.680033] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.688222] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522c38bc-297b-8bab-187d-13c3eb1283b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.004526] env[63297]: DEBUG oslo_concurrency.lockutils [req-9ed0db61-ce27-4d2b-a51c-fa87ca97f4bd req-5681dfe2-ac53-4dd8-8eee-018b6e7a3966 service nova] Releasing lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.136859] env[63297]: DEBUG nova.objects.base [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1825.137991] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9f8513-0735-4617-82b9-e4d40015a64f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.159397] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3486482-e180-4e69-9b02-110a1e3138a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.166094] env[63297]: DEBUG oslo_vmware.api [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1825.166094] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a9b4ae-2647-41cf-ef50-f949c04b999e" [ 1825.166094] env[63297]: _type = "Task" [ 1825.166094] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.175192] env[63297]: DEBUG oslo_vmware.api [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a9b4ae-2647-41cf-ef50-f949c04b999e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.190017] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522c38bc-297b-8bab-187d-13c3eb1283b9, 'name': SearchDatastore_Task, 'duration_secs': 0.009347} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.190943] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cda5d9b-ed1d-4a29-baa0-7842170d1295 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.197016] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for the task: (returnval){ [ 1825.197016] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522d9e4a-df43-4647-17f0-a04d99ca6a84" [ 1825.197016] env[63297]: _type = "Task" [ 1825.197016] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.205609] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522d9e4a-df43-4647-17f0-a04d99ca6a84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.677203] env[63297]: DEBUG oslo_vmware.api [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52a9b4ae-2647-41cf-ef50-f949c04b999e, 'name': SearchDatastore_Task, 'duration_secs': 0.012168} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.677500] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.677756] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.708207] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522d9e4a-df43-4647-17f0-a04d99ca6a84, 'name': SearchDatastore_Task, 'duration_secs': 0.010597} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.708463] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.708742] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 41d4118d-7621-4ac9-be2f-2664cd691180/41d4118d-7621-4ac9-be2f-2664cd691180.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1825.709053] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ffe2196a-e618-49ac-a372-a832daf8f77d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.717848] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for the task: (returnval){ [ 1825.717848] env[63297]: value = "task-1698391" [ 1825.717848] env[63297]: _type = "Task" [ 1825.717848] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.726567] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698391, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.232802] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698391, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.306712] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a988b8-65f3-4064-9a73-43393858e516 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.315317] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea450f6d-2738-4246-9e0c-acc4d115de20 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.349187] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0428e3c0-e1e1-43ee-82a1-cfa95aa4562b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.358133] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c74fe2-01cc-4e49-aaa7-74405586ca02 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.372968] env[63297]: DEBUG nova.compute.provider_tree [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1826.731670] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698391, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517014} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.731954] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 41d4118d-7621-4ac9-be2f-2664cd691180/41d4118d-7621-4ac9-be2f-2664cd691180.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1826.732211] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1826.732468] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e3217084-34fe-4cd0-ae4d-5be71e0309e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.740215] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for the task: (returnval){ [ 1826.740215] env[63297]: value = "task-1698392" [ 1826.740215] env[63297]: _type = "Task" [ 1826.740215] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.749995] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698392, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.877409] env[63297]: DEBUG nova.scheduler.client.report [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1827.250881] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698392, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083718} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.251265] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1827.251932] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1887a6a3-31ca-43b8-af70-baa6172a8f00 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.274937] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 41d4118d-7621-4ac9-be2f-2664cd691180/41d4118d-7621-4ac9-be2f-2664cd691180.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1827.275237] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a227bbfa-4dc0-44e0-a2bf-65d9d5b726a9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.295915] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for the task: (returnval){ [ 1827.295915] env[63297]: value = "task-1698393" [ 1827.295915] env[63297]: _type = "Task" [ 1827.295915] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.305115] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698393, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.808309] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698393, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.880832] env[63297]: DEBUG oslo_vmware.rw_handles [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52310e8e-f528-793a-8dd9-c8d063a52865/disk-0.vmdk. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1827.881762] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63f6613-a1b7-4494-9c39-b2053a2f2afd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.886588] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.209s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.891029] env[63297]: DEBUG oslo_vmware.rw_handles [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52310e8e-f528-793a-8dd9-c8d063a52865/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1827.891029] env[63297]: ERROR oslo_vmware.rw_handles [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52310e8e-f528-793a-8dd9-c8d063a52865/disk-0.vmdk due to incomplete transfer. [ 1827.891259] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e3ae8c77-a0bf-4495-885e-0986778b8f9d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.899637] env[63297]: DEBUG oslo_vmware.rw_handles [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52310e8e-f528-793a-8dd9-c8d063a52865/disk-0.vmdk. 
{{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1827.899818] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Uploaded image 2aadb7a8-891e-4a45-b2ae-792004e8b06b to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1827.902166] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1827.902398] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-85d6ced8-92f5-41fe-8a08-647a71e75624 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.909099] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1827.909099] env[63297]: value = "task-1698394" [ 1827.909099] env[63297]: _type = "Task" [ 1827.909099] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.917252] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698394, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.306799] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698393, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.418852] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698394, 'name': Destroy_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.445764] env[63297]: INFO nova.scheduler.client.report [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted allocation for migration 170e9d1f-1d98-44ad-b949-2754f5f3dffa [ 1828.752664] env[63297]: INFO nova.compute.manager [None req-20c2e412-865f-45a7-9490-3dbb143ca759 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Get console output [ 1828.752893] env[63297]: WARNING nova.virt.vmwareapi.driver [None req-20c2e412-865f-45a7-9490-3dbb143ca759 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] The console log is missing. Check your VSPC configuration [ 1828.806969] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698393, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.919697] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698394, 'name': Destroy_Task, 'duration_secs': 0.53057} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.919982] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Destroyed the VM [ 1828.920241] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1828.920489] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cb2c0f4f-4d48-4800-870c-d7f67250a049 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.927905] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1828.927905] env[63297]: value = "task-1698395" [ 1828.927905] env[63297]: _type = "Task" [ 1828.927905] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.936162] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698395, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.952445] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c1be589b-c6fa-4d13-8eee-a72154ecc044 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.624s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.307145] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698393, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.438895] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698395, 'name': RemoveSnapshot_Task, 'duration_secs': 0.487624} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.439105] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1829.439383] env[63297]: DEBUG nova.compute.manager [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1829.440192] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cad531c-c787-4d3b-9c03-a7676c95c076 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.807375] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698393, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.952756] env[63297]: INFO nova.compute.manager [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Shelve offloading [ 1829.954275] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1829.954520] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3458882b-a833-4396-9e7b-ded6198111b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.962918] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1829.962918] env[63297]: value = "task-1698396" [ 1829.962918] env[63297]: _type = "Task" [ 1829.962918] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.974167] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1829.974419] env[63297]: DEBUG nova.compute.manager [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1829.975215] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c747f6-cf7a-4575-bd48-b6f334dfd25a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.981048] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.981229] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.981399] env[63297]: DEBUG nova.network.neutron [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Building network info cache for 
instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1830.308416] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698393, 'name': ReconfigVM_Task, 'duration_secs': 2.543858} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.308919] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 41d4118d-7621-4ac9-be2f-2664cd691180/41d4118d-7621-4ac9-be2f-2664cd691180.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1830.309314] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eeddae32-29c3-4766-b56a-18c42c8f65ae {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.317559] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for the task: (returnval){ [ 1830.317559] env[63297]: value = "task-1698397" [ 1830.317559] env[63297]: _type = "Task" [ 1830.317559] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.326282] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698397, 'name': Rename_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.765111] env[63297]: DEBUG nova.network.neutron [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updating instance_info_cache with network_info: [{"id": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "address": "fa:16:3e:78:77:0e", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b49c0c7-27", "ovs_interfaceid": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1830.827464] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698397, 'name': Rename_Task, 'duration_secs': 0.144373} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.827702] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1830.827966] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-965f64d5-ea35-4856-ba39-28694f425dd6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.836350] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for the task: (returnval){ [ 1830.836350] env[63297]: value = "task-1698398" [ 1830.836350] env[63297]: _type = "Task" [ 1830.836350] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.844482] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698398, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.268297] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.347382] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698398, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.523975] env[63297]: DEBUG nova.compute.manager [req-960d894a-a3e4-47a9-97d8-96556e9e0b66 req-c8347328-06c8-4293-bdff-7997212ca207 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Received event network-vif-unplugged-8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1831.524320] env[63297]: DEBUG oslo_concurrency.lockutils [req-960d894a-a3e4-47a9-97d8-96556e9e0b66 req-c8347328-06c8-4293-bdff-7997212ca207 service nova] Acquiring lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.524616] env[63297]: DEBUG oslo_concurrency.lockutils [req-960d894a-a3e4-47a9-97d8-96556e9e0b66 req-c8347328-06c8-4293-bdff-7997212ca207 service nova] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.524867] env[63297]: DEBUG oslo_concurrency.lockutils [req-960d894a-a3e4-47a9-97d8-96556e9e0b66 req-c8347328-06c8-4293-bdff-7997212ca207 service nova] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.525145] env[63297]: DEBUG nova.compute.manager [req-960d894a-a3e4-47a9-97d8-96556e9e0b66 req-c8347328-06c8-4293-bdff-7997212ca207 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] No waiting events found dispatching network-vif-unplugged-8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1831.525402] env[63297]: WARNING nova.compute.manager [req-960d894a-a3e4-47a9-97d8-96556e9e0b66 req-c8347328-06c8-4293-bdff-7997212ca207 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Received unexpected event network-vif-unplugged-8b49c0c7-27b3-41da-b832-28195da8e8d1 for instance with vm_state shelved and task_state 
shelving_offloading. [ 1831.588553] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1831.589465] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ede787-7f96-42cd-b1ea-d0771d4d9074 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.597980] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1831.598228] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86310c5f-b61f-40f8-8f7c-69008adcf762 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.752681] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1831.753046] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1831.753280] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleting the datastore file [datastore1] d7db24c1-35db-46d5-a406-fbb8c1c5d158 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1831.753567] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04e25ea2-09bd-453d-bc6c-baa8a2b0a57c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.760850] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1831.760850] env[63297]: value = "task-1698400" [ 1831.760850] env[63297]: _type = "Task" [ 1831.760850] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.769472] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698400, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.848405] env[63297]: DEBUG oslo_vmware.api [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698398, 'name': PowerOnVM_Task, 'duration_secs': 0.566664} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.848739] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1831.849051] env[63297]: INFO nova.compute.manager [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Took 10.40 seconds to spawn the instance on the hypervisor. [ 1831.849295] env[63297]: DEBUG nova.compute.manager [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1831.850183] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c752daf-984d-4893-927c-b139cdcb9e5b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.275815] env[63297]: DEBUG oslo_vmware.api [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199653} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.276171] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1832.276449] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1832.276853] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1832.301947] env[63297]: INFO nova.scheduler.client.report [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleted allocations for instance d7db24c1-35db-46d5-a406-fbb8c1c5d158 [ 1832.369929] env[63297]: INFO nova.compute.manager [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Took 15.17 seconds to build instance. [ 1832.485502] env[63297]: DEBUG nova.compute.manager [req-5fc43dce-3456-43a4-8fc5-2a906797dccc req-d9b2d1d5-1076-4b38-bb72-f1ca8c124fc0 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Received event network-changed-199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1832.486579] env[63297]: DEBUG nova.compute.manager [req-5fc43dce-3456-43a4-8fc5-2a906797dccc req-d9b2d1d5-1076-4b38-bb72-f1ca8c124fc0 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Refreshing instance network info cache due to event network-changed-199fed1c-c158-4c42-85e0-83b3e9035230. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1832.486579] env[63297]: DEBUG oslo_concurrency.lockutils [req-5fc43dce-3456-43a4-8fc5-2a906797dccc req-d9b2d1d5-1076-4b38-bb72-f1ca8c124fc0 service nova] Acquiring lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.486579] env[63297]: DEBUG oslo_concurrency.lockutils [req-5fc43dce-3456-43a4-8fc5-2a906797dccc req-d9b2d1d5-1076-4b38-bb72-f1ca8c124fc0 service nova] Acquired lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.486579] env[63297]: DEBUG nova.network.neutron [req-5fc43dce-3456-43a4-8fc5-2a906797dccc req-d9b2d1d5-1076-4b38-bb72-f1ca8c124fc0 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Refreshing network info cache for port 199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1832.806811] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.807087] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.807331] env[63297]: DEBUG nova.objects.instance [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lazy-loading 'resources' on Instance uuid d7db24c1-35db-46d5-a406-fbb8c1c5d158 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1832.872459] env[63297]: DEBUG oslo_concurrency.lockutils [None req-0a6cc4be-b299-498a-9682-1ef40d1e139d tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lock "41d4118d-7621-4ac9-be2f-2664cd691180" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.683s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.214817] env[63297]: DEBUG nova.network.neutron [req-5fc43dce-3456-43a4-8fc5-2a906797dccc req-d9b2d1d5-1076-4b38-bb72-f1ca8c124fc0 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updated VIF entry in instance network info cache for port 199fed1c-c158-4c42-85e0-83b3e9035230. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1833.215232] env[63297]: DEBUG nova.network.neutron [req-5fc43dce-3456-43a4-8fc5-2a906797dccc req-d9b2d1d5-1076-4b38-bb72-f1ca8c124fc0 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updating instance_info_cache with network_info: [{"id": "199fed1c-c158-4c42-85e0-83b3e9035230", "address": "fa:16:3e:22:93:2a", "network": {"id": "88ab7e6e-b26f-40e8-a725-1d0f995a7758", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-216828050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8591e3bee13e474b88f592525d95a2e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199fed1c-c1", "ovs_interfaceid": "199fed1c-c158-4c42-85e0-83b3e9035230", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.309889] env[63297]: DEBUG nova.objects.instance [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lazy-loading 'numa_topology' on Instance uuid d7db24c1-35db-46d5-a406-fbb8c1c5d158 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1833.547997] env[63297]: DEBUG nova.compute.manager [req-6068c3d7-1c43-409a-babd-866474c45af3 req-41523f06-0927-4578-a5b7-5f7541455774 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Received event network-changed-8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1833.548288] env[63297]: DEBUG nova.compute.manager [req-6068c3d7-1c43-409a-babd-866474c45af3 req-41523f06-0927-4578-a5b7-5f7541455774 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Refreshing instance network info cache due to event network-changed-8b49c0c7-27b3-41da-b832-28195da8e8d1. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1833.548440] env[63297]: DEBUG oslo_concurrency.lockutils [req-6068c3d7-1c43-409a-babd-866474c45af3 req-41523f06-0927-4578-a5b7-5f7541455774 service nova] Acquiring lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1833.548582] env[63297]: DEBUG oslo_concurrency.lockutils [req-6068c3d7-1c43-409a-babd-866474c45af3 req-41523f06-0927-4578-a5b7-5f7541455774 service nova] Acquired lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1833.548741] env[63297]: DEBUG nova.network.neutron [req-6068c3d7-1c43-409a-babd-866474c45af3 req-41523f06-0927-4578-a5b7-5f7541455774 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Refreshing network info cache for port 8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1833.718364] env[63297]: DEBUG oslo_concurrency.lockutils [req-5fc43dce-3456-43a4-8fc5-2a906797dccc req-d9b2d1d5-1076-4b38-bb72-f1ca8c124fc0 service nova] Releasing lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.812747] env[63297]: DEBUG nova.objects.base [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1833.900959] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6637c017-da7c-4020-8322-6acfa141c51b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.909218] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2638288f-0be5-409f-aea6-bb221c71d6bf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.940542] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a9ffbd-cbaf-4e41-bb4e-5dee61d2d7fb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.948710] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81910a4-2495-4870-a9cc-21b7aac9ad66 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.964070] env[63297]: DEBUG nova.compute.provider_tree [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1834.274303] env[63297]: DEBUG nova.network.neutron [req-6068c3d7-1c43-409a-babd-866474c45af3 req-41523f06-0927-4578-a5b7-5f7541455774 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updated VIF entry in instance network info cache for port 8b49c0c7-27b3-41da-b832-28195da8e8d1. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1834.274676] env[63297]: DEBUG nova.network.neutron [req-6068c3d7-1c43-409a-babd-866474c45af3 req-41523f06-0927-4578-a5b7-5f7541455774 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updating instance_info_cache with network_info: [{"id": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "address": "fa:16:3e:78:77:0e", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8b49c0c7-27", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.467513] env[63297]: DEBUG nova.scheduler.client.report [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1834.778604] env[63297]: DEBUG oslo_concurrency.lockutils [req-6068c3d7-1c43-409a-babd-866474c45af3 req-41523f06-0927-4578-a5b7-5f7541455774 service nova] Releasing lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1834.972637] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.280244] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.481293] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61ccdd11-ad49-47d8-a2fc-e2781a3a021b 
tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.478s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.481784] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.202s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.482086] env[63297]: INFO nova.compute.manager [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Unshelving [ 1835.541303] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.541539] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.665869] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.044563] env[63297]: DEBUG nova.compute.utils [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1836.506057] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.506361] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.506578] env[63297]: DEBUG nova.objects.instance [None 
req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lazy-loading 'pci_requests' on Instance uuid d7db24c1-35db-46d5-a406-fbb8c1c5d158 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1836.546631] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.011077] env[63297]: DEBUG nova.objects.instance [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lazy-loading 'numa_topology' on Instance uuid d7db24c1-35db-46d5-a406-fbb8c1c5d158 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1837.514035] env[63297]: INFO nova.compute.claims [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1837.602027] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.602302] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.602538] env[63297]: INFO nova.compute.manager [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Attaching volume 2fa67085-2704-4257-8419-058e1d030e8c to /dev/sdb [ 1837.632783] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32eae749-d822-46de-afaf-f024f915a085 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.640680] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f82e6b5-98f8-4d44-94ed-b0c41e37a2ff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.654606] env[63297]: DEBUG nova.virt.block_device [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Updating existing volume attachment record: 
bb8a105e-598b-4701-8a07-50ef915949e2 {{(pid=63297) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1838.606829] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe7d2e8-fbbc-4846-9c65-ffc800c9d390 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.615418] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bd379e-00c6-488d-890e-fb0408df6491 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.646195] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a86481-2a70-4d36-9f47-cf40a58fdf21 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.654865] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e40719-ecdb-4279-b765-422ba338c06e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.668195] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.668642] env[63297]: DEBUG nova.compute.provider_tree [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1839.171942] env[63297]: DEBUG nova.scheduler.client.report [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1839.664836] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.665219] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1839.665268] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.677198] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.171s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.705612] env[63297]: INFO nova.network.neutron [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updating port 8b49c0c7-27b3-41da-b832-28195da8e8d1 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1840.169068] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.169349] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.169527] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.169682] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1840.171052] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacc076f-9172-4e02-9cba-defa0ba1fcc3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.179118] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd177579-f632-45fd-ba96-723fb9ddcb33 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.192206] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af7a65e-8626-4de3-abb9-29b12c3454fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.198489] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54ce635-56d7-4be3-b4f2-f370cc076e7a 
{{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.227695] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179971MB free_disk=181GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1840.227845] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.228048] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.256724] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.256983] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance c257ee27-8d87-4fe6-a953-cc4af1ec36d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.257047] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance d4b2da36-b0fd-47d2-95de-ef4b3f91330f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.257208] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance b6a1f66d-783e-4263-b9c4-a4d517ce2923 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.257382] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 1316bf99-cc93-4d1a-b31c-000dac095b3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.257528] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 41d4118d-7621-4ac9-be2f-2664cd691180 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.257645] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance d7db24c1-35db-46d5-a406-fbb8c1c5d158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.257846] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1841.257986] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1920MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1841.348462] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd371a9-87d4-4338-8e35-97ffbe9785a1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.356620] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ff8aa4-c00a-40ea-ad6a-e34c9bd116e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.386582] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d862c00f-bce5-4157-b830-6e60d5066b24 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.393976] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa64dcf-05bb-4a13-a044-1b949faf8af7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.406821] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1841.909611] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1842.197769] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Volume attach. 
Driver type: vmdk {{(pid=63297) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1842.198026] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354034', 'volume_id': '2fa67085-2704-4257-8419-058e1d030e8c', 'name': 'volume-2fa67085-2704-4257-8419-058e1d030e8c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd4b2da36-b0fd-47d2-95de-ef4b3f91330f', 'attached_at': '', 'detached_at': '', 'volume_id': '2fa67085-2704-4257-8419-058e1d030e8c', 'serial': '2fa67085-2704-4257-8419-058e1d030e8c'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1842.198918] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ed0d6e-beb2-44dd-bb34-3c93bbd78532 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.216362] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f47d880-bf61-47cc-8cb2-e0c85b42d9df {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.241123] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] volume-2fa67085-2704-4257-8419-058e1d030e8c/volume-2fa67085-2704-4257-8419-058e1d030e8c.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1842.241391] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8821159e-170f-46fe-9d3d-cbcb617860db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.259568] env[63297]: DEBUG oslo_vmware.api [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1842.259568] env[63297]: value = "task-1698403" [ 1842.259568] env[63297]: _type = "Task" [ 1842.259568] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.267404] env[63297]: DEBUG oslo_vmware.api [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698403, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.414550] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1842.414734] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.187s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.769835] env[63297]: DEBUG oslo_vmware.api [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698403, 'name': ReconfigVM_Task, 'duration_secs': 0.357819} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.770063] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Reconfigured VM instance instance-00000072 to attach disk [datastore1] volume-2fa67085-2704-4257-8419-058e1d030e8c/volume-2fa67085-2704-4257-8419-058e1d030e8c.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1842.774810] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88c0b70a-2265-452e-a244-4ec30ba31395 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.792050] env[63297]: DEBUG oslo_vmware.api [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1842.792050] env[63297]: value = "task-1698404" [ 1842.792050] env[63297]: _type = "Task" [ 1842.792050] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.803530] env[63297]: DEBUG oslo_vmware.api [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698404, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.302116] env[63297]: DEBUG oslo_vmware.api [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698404, 'name': ReconfigVM_Task, 'duration_secs': 0.148776} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.302480] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354034', 'volume_id': '2fa67085-2704-4257-8419-058e1d030e8c', 'name': 'volume-2fa67085-2704-4257-8419-058e1d030e8c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd4b2da36-b0fd-47d2-95de-ef4b3f91330f', 'attached_at': '', 'detached_at': '', 'volume_id': '2fa67085-2704-4257-8419-058e1d030e8c', 'serial': '2fa67085-2704-4257-8419-058e1d030e8c'} {{(pid=63297) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1843.410264] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1843.410467] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1843.916022] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1843.916022] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1844.342095] env[63297]: DEBUG nova.objects.instance [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lazy-loading 'flavor' on Instance uuid d4b2da36-b0fd-47d2-95de-ef4b3f91330f {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1844.811513] env[63297]: DEBUG nova.compute.manager [req-5b7738bb-99ac-4a1e-a585-4041b8034d13 req-4d7a9ab8-6cc3-42a1-8088-db06e2755d62 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Received event network-vif-plugged-8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1844.811859] env[63297]: DEBUG oslo_concurrency.lockutils [req-5b7738bb-99ac-4a1e-a585-4041b8034d13 req-4d7a9ab8-6cc3-42a1-8088-db06e2755d62 service nova] Acquiring lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.812119] env[63297]: DEBUG oslo_concurrency.lockutils [req-5b7738bb-99ac-4a1e-a585-4041b8034d13 req-4d7a9ab8-6cc3-42a1-8088-db06e2755d62 service nova] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.812299] env[63297]: DEBUG oslo_concurrency.lockutils [req-5b7738bb-99ac-4a1e-a585-4041b8034d13 req-4d7a9ab8-6cc3-42a1-8088-db06e2755d62 service nova] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.812465] env[63297]: DEBUG nova.compute.manager [req-5b7738bb-99ac-4a1e-a585-4041b8034d13 req-4d7a9ab8-6cc3-42a1-8088-db06e2755d62 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] No waiting events found dispatching network-vif-plugged-8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1844.812631] env[63297]: WARNING nova.compute.manager [req-5b7738bb-99ac-4a1e-a585-4041b8034d13 req-4d7a9ab8-6cc3-42a1-8088-db06e2755d62 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Received unexpected event network-vif-plugged-8b49c0c7-27b3-41da-b832-28195da8e8d1 for instance with vm_state shelved_offloaded and task_state spawning. [ 1844.847654] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f9141df4-03d3-48bc-b1fd-d783e52fc764 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.245s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.898483] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.898720] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.898901] env[63297]: DEBUG nova.network.neutron [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1845.025982] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.026280] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" acquired by 
"nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.529620] env[63297]: INFO nova.compute.manager [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Detaching volume 2fa67085-2704-4257-8419-058e1d030e8c [ 1845.563932] env[63297]: INFO nova.virt.block_device [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Attempting to driver detach volume 2fa67085-2704-4257-8419-058e1d030e8c from mountpoint /dev/sdb [ 1845.564187] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Volume detach. Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1845.564374] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354034', 'volume_id': '2fa67085-2704-4257-8419-058e1d030e8c', 'name': 'volume-2fa67085-2704-4257-8419-058e1d030e8c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd4b2da36-b0fd-47d2-95de-ef4b3f91330f', 'attached_at': '', 'detached_at': '', 'volume_id': '2fa67085-2704-4257-8419-058e1d030e8c', 'serial': '2fa67085-2704-4257-8419-058e1d030e8c'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1845.565720] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f89c06-07ca-4395-9c30-a9e544d1908f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.588929] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35503bc6-b2f9-4e9f-97b5-f3607e63b193 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.595820] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f63e8a4-8b79-40ca-a4e1-7fdf05f37edd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.615123] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790c56d2-5bd1-47d4-ba9b-80fdae1f8846 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.629200] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] The volume has not been displaced from its original location: [datastore1] 
volume-2fa67085-2704-4257-8419-058e1d030e8c/volume-2fa67085-2704-4257-8419-058e1d030e8c.vmdk. No consolidation needed. {{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1845.634324] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Reconfiguring VM instance instance-00000072 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1845.635221] env[63297]: DEBUG nova.network.neutron [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updating instance_info_cache with network_info: [{"id": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "address": "fa:16:3e:78:77:0e", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b49c0c7-27", "ovs_interfaceid": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.636304] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a555b8f1-6ceb-4791-934a-87c2bdf3d32e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.649241] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.658901] env[63297]: DEBUG oslo_vmware.api [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1845.658901] env[63297]: value = "task-1698405" [ 1845.658901] env[63297]: _type = "Task" [ 1845.658901] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.668672] env[63297]: DEBUG oslo_vmware.api [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698405, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.676608] env[63297]: DEBUG nova.virt.hardware [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='23a53c664d8dae91216e4e9b06623c59',container_format='bare',created_at=2024-12-10T17:32:46Z,direct_url=,disk_format='vmdk',id=2aadb7a8-891e-4a45-b2ae-792004e8b06b,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1158048381-shelved',owner='01fe9157b11244cb86a7626caae0616d',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2024-12-10T17:33:01Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1845.676826] env[63297]: DEBUG nova.virt.hardware [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1845.676982] env[63297]: DEBUG nova.virt.hardware [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1845.677182] env[63297]: DEBUG nova.virt.hardware [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1845.677326] env[63297]: DEBUG nova.virt.hardware [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1845.677470] env[63297]: DEBUG nova.virt.hardware [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1845.677669] env[63297]: DEBUG nova.virt.hardware [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1845.677821] env[63297]: DEBUG nova.virt.hardware [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1845.677979] env[63297]: DEBUG nova.virt.hardware [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1845.678149] env[63297]: DEBUG nova.virt.hardware [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1845.678316] env[63297]: DEBUG nova.virt.hardware [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1845.679085] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfeb0ab5-22a7-4668-b67a-11da63217887 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.687915] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e48009-ca16-4347-ac17-6c3b500ebba4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.701119] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:77:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b49c0c7-27b3-41da-b832-28195da8e8d1', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1845.708561] env[63297]: DEBUG oslo.service.loopingcall [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1845.708797] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1845.709015] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57585b49-4c56-4fe7-8eed-040d1138f6ac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.728695] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1845.728695] env[63297]: value = "task-1698406" [ 1845.728695] env[63297]: _type = "Task" [ 1845.728695] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.736018] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698406, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.168750] env[63297]: DEBUG oslo_vmware.api [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698405, 'name': ReconfigVM_Task, 'duration_secs': 0.216793} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.168977] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Reconfigured VM instance instance-00000072 to detach disk 2001 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1846.173578] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-664ce772-dee4-4c90-ab70-c8cc2509b816 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.189596] env[63297]: DEBUG oslo_vmware.api [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1846.189596] env[63297]: value = "task-1698407" [ 1846.189596] env[63297]: _type = "Task" [ 1846.189596] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.197724] env[63297]: DEBUG oslo_vmware.api [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698407, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.237949] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698406, 'name': CreateVM_Task, 'duration_secs': 0.324231} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.238244] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1846.238791] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.238954] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.239370] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1846.239637] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e00eb5e-a758-4889-a993-b7120d5bae1f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.243946] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1846.243946] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]523e3aed-aaac-ae1e-380c-23ec35560f28" [ 1846.243946] env[63297]: _type = "Task" [ 1846.243946] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.251503] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523e3aed-aaac-ae1e-380c-23ec35560f28, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.455171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "refresh_cache-c257ee27-8d87-4fe6-a953-cc4af1ec36d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.455333] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquired lock "refresh_cache-c257ee27-8d87-4fe6-a953-cc4af1ec36d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.455482] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Forcefully refreshing network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1846.699267] env[63297]: DEBUG oslo_vmware.api [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698407, 'name': ReconfigVM_Task, 'duration_secs': 0.136872} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.699577] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354034', 'volume_id': '2fa67085-2704-4257-8419-058e1d030e8c', 'name': 'volume-2fa67085-2704-4257-8419-058e1d030e8c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd4b2da36-b0fd-47d2-95de-ef4b3f91330f', 'attached_at': '', 'detached_at': '', 'volume_id': '2fa67085-2704-4257-8419-058e1d030e8c', 'serial': '2fa67085-2704-4257-8419-058e1d030e8c'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1846.754449] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.754717] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Processing image 2aadb7a8-891e-4a45-b2ae-792004e8b06b {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1846.754951] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b/2aadb7a8-891e-4a45-b2ae-792004e8b06b.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.755114] env[63297]: DEBUG 
oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b/2aadb7a8-891e-4a45-b2ae-792004e8b06b.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.755295] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1846.755541] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0e1015d-e708-47ed-86b0-6dbe7456a544 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.763493] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1846.763658] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1846.764391] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2f4bc5c-3e54-40da-99c5-880ab8dd752e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.769282] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1846.769282] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]523a4f42-7db5-2d30-deac-34899fca2853" [ 1846.769282] env[63297]: _type = "Task" [ 1846.769282] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.776486] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]523a4f42-7db5-2d30-deac-34899fca2853, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.834908] env[63297]: DEBUG nova.compute.manager [req-731b853a-9535-40bb-ba52-a8575d613d9b req-0508a0c8-a7a0-4c2c-9178-0659de67920c service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Received event network-changed-8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1846.835119] env[63297]: DEBUG nova.compute.manager [req-731b853a-9535-40bb-ba52-a8575d613d9b req-0508a0c8-a7a0-4c2c-9178-0659de67920c service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Refreshing instance network info cache due to event network-changed-8b49c0c7-27b3-41da-b832-28195da8e8d1. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1846.835324] env[63297]: DEBUG oslo_concurrency.lockutils [req-731b853a-9535-40bb-ba52-a8575d613d9b req-0508a0c8-a7a0-4c2c-9178-0659de67920c service nova] Acquiring lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.835464] env[63297]: DEBUG oslo_concurrency.lockutils [req-731b853a-9535-40bb-ba52-a8575d613d9b req-0508a0c8-a7a0-4c2c-9178-0659de67920c service nova] Acquired lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.835625] env[63297]: DEBUG nova.network.neutron [req-731b853a-9535-40bb-ba52-a8575d613d9b req-0508a0c8-a7a0-4c2c-9178-0659de67920c service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Refreshing network info cache for port 8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1847.239040] env[63297]: DEBUG nova.objects.instance [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lazy-loading 'flavor' on Instance uuid d4b2da36-b0fd-47d2-95de-ef4b3f91330f {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1847.279101] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Preparing fetch location {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1847.279340] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Fetch image to [datastore1] OSTACK_IMG_9dcb1fba-1042-4173-861d-a5fd1e5137a7/OSTACK_IMG_9dcb1fba-1042-4173-861d-a5fd1e5137a7.vmdk {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1847.279520] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Downloading stream optimized image 2aadb7a8-891e-4a45-b2ae-792004e8b06b to [datastore1] OSTACK_IMG_9dcb1fba-1042-4173-861d-a5fd1e5137a7/OSTACK_IMG_9dcb1fba-1042-4173-861d-a5fd1e5137a7.vmdk on the data store 
datastore1 as vApp {{(pid=63297) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1847.279687] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Downloading image file data 2aadb7a8-891e-4a45-b2ae-792004e8b06b to the ESX as VM named 'OSTACK_IMG_9dcb1fba-1042-4173-861d-a5fd1e5137a7' {{(pid=63297) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1847.343865] env[63297]: DEBUG oslo_vmware.rw_handles [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1847.343865] env[63297]: value = "resgroup-9" [ 1847.343865] env[63297]: _type = "ResourcePool" [ 1847.343865] env[63297]: }. {{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1847.344134] env[63297]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-215570e4-4ab8-4dd3-9412-f7c692103983 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.364597] env[63297]: DEBUG oslo_vmware.rw_handles [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lease: (returnval){ [ 1847.364597] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d9a5e4-f83f-bace-05dd-0170b704a311" [ 1847.364597] env[63297]: _type = "HttpNfcLease" [ 1847.364597] env[63297]: } obtained for vApp import into resource pool (val){ [ 1847.364597] env[63297]: value = "resgroup-9" [ 1847.364597] env[63297]: _type = "ResourcePool" [ 1847.364597] env[63297]: }. {{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1847.364865] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the lease: (returnval){ [ 1847.364865] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d9a5e4-f83f-bace-05dd-0170b704a311" [ 1847.364865] env[63297]: _type = "HttpNfcLease" [ 1847.364865] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1847.372934] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1847.372934] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d9a5e4-f83f-bace-05dd-0170b704a311" [ 1847.372934] env[63297]: _type = "HttpNfcLease" [ 1847.372934] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1847.580194] env[63297]: DEBUG nova.network.neutron [req-731b853a-9535-40bb-ba52-a8575d613d9b req-0508a0c8-a7a0-4c2c-9178-0659de67920c service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updated VIF entry in instance network info cache for port 8b49c0c7-27b3-41da-b832-28195da8e8d1. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1847.580612] env[63297]: DEBUG nova.network.neutron [req-731b853a-9535-40bb-ba52-a8575d613d9b req-0508a0c8-a7a0-4c2c-9178-0659de67920c service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updating instance_info_cache with network_info: [{"id": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "address": "fa:16:3e:78:77:0e", "network": {"id": "152013ff-e9e7-48d2-8c88-f8cdc0504e9d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1340209449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01fe9157b11244cb86a7626caae0616d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b49c0c7-27", "ovs_interfaceid": "8b49c0c7-27b3-41da-b832-28195da8e8d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.670425] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Updating instance_info_cache with network_info: [{"id": "a995a9af-3d84-43dd-8695-17446ea38fc8", "address": "fa:16:3e:8b:0b:e5", "network": {"id": "e1105d5d-15f9-4bdb-b2b6-d4e654a33f1e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-299384608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc81b0f87c64b2283eb0ece21fb31a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa995a9af-3d", "ovs_interfaceid": "a995a9af-3d84-43dd-8695-17446ea38fc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.874521] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1847.874521] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d9a5e4-f83f-bace-05dd-0170b704a311" [ 1847.874521] env[63297]: _type = "HttpNfcLease" [ 1847.874521] env[63297]: } is initializing. 
{{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1848.083413] env[63297]: DEBUG oslo_concurrency.lockutils [req-731b853a-9535-40bb-ba52-a8575d613d9b req-0508a0c8-a7a0-4c2c-9178-0659de67920c service nova] Releasing lock "refresh_cache-d7db24c1-35db-46d5-a406-fbb8c1c5d158" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1848.173493] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Releasing lock "refresh_cache-c257ee27-8d87-4fe6-a953-cc4af1ec36d6" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1848.173665] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Updated the network info_cache for instance {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1848.173869] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1848.174040] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1848.174202] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1848.246039] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2a6b0b97-b6f7-4a8f-9ee3-bf5bc606a96e tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.220s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.375703] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1848.375703] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d9a5e4-f83f-bace-05dd-0170b704a311" [ 1848.375703] env[63297]: _type = "HttpNfcLease" [ 1848.375703] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1848.375984] env[63297]: DEBUG oslo_vmware.rw_handles [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1848.375984] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d9a5e4-f83f-bace-05dd-0170b704a311" [ 1848.375984] env[63297]: _type = "HttpNfcLease" [ 1848.375984] env[63297]: }. 
{{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1848.376754] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7414eb-56dd-4a54-8a3b-be24cd58674f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.384257] env[63297]: DEBUG oslo_vmware.rw_handles [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529429bb-2cf9-7374-67cb-0a7c8ecf3ba0/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1848.384437] env[63297]: DEBUG oslo_vmware.rw_handles [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529429bb-2cf9-7374-67cb-0a7c8ecf3ba0/disk-0.vmdk. {{(pid=63297) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1848.463490] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a536b00c-ba6d-4d51-a810-3addd93563df {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.271063] env[63297]: DEBUG oslo_concurrency.lockutils [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.271491] env[63297]: DEBUG oslo_concurrency.lockutils [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.271491] env[63297]: DEBUG oslo_concurrency.lockutils [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.271704] env[63297]: DEBUG oslo_concurrency.lockutils [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.271830] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.275625] env[63297]: INFO nova.compute.manager [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Terminating instance [ 1849.277673] env[63297]: DEBUG nova.compute.manager [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1849.277877] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1849.278771] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a97cb46-72e7-4fbb-b768-6f95e86d089c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.289079] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1849.289340] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5dedd86c-b709-4752-b0c7-fbc1741f676c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.300737] env[63297]: DEBUG oslo_vmware.api [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1849.300737] env[63297]: value = "task-1698409" [ 1849.300737] env[63297]: _type = "Task" [ 1849.300737] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.314511] env[63297]: DEBUG oslo_vmware.api [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698409, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.630077] env[63297]: DEBUG oslo_vmware.rw_handles [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Completed reading data from the image iterator. 
{{(pid=63297) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1849.630077] env[63297]: DEBUG oslo_vmware.rw_handles [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529429bb-2cf9-7374-67cb-0a7c8ecf3ba0/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1849.630517] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5dbbfd-a43f-4a0f-8b4b-62f2f9f405b3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.637301] env[63297]: DEBUG oslo_vmware.rw_handles [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529429bb-2cf9-7374-67cb-0a7c8ecf3ba0/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1849.637450] env[63297]: DEBUG oslo_vmware.rw_handles [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529429bb-2cf9-7374-67cb-0a7c8ecf3ba0/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1849.637696] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-17060c58-c92d-4773-a6c6-d3b8916c3085 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.812222] env[63297]: DEBUG oslo_vmware.api [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698409, 'name': PowerOffVM_Task, 'duration_secs': 0.354368} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.812519] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1849.812710] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1849.813285] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e97cd385-7d81-4e64-99a9-27e914a05b7c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.821874] env[63297]: DEBUG oslo_vmware.rw_handles [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529429bb-2cf9-7374-67cb-0a7c8ecf3ba0/disk-0.vmdk. {{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1849.822078] env[63297]: INFO nova.virt.vmwareapi.images [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Downloaded image file data 2aadb7a8-891e-4a45-b2ae-792004e8b06b [ 1849.822842] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e732260-506f-4a3f-9d33-e7f39b8e20f5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.837606] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb2361aa-42f6-46f0-9bd9-8e3a22e263cc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.884119] env[63297]: INFO nova.virt.vmwareapi.images [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] The imported VM was unregistered [ 1849.886634] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Caching image {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1849.886879] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Creating directory with path [datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1849.887170] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-4e483035-2f9b-4648-955c-f190bcaaa740 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.897567] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Created directory with path [datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1849.897785] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_9dcb1fba-1042-4173-861d-a5fd1e5137a7/OSTACK_IMG_9dcb1fba-1042-4173-861d-a5fd1e5137a7.vmdk to [datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b/2aadb7a8-891e-4a45-b2ae-792004e8b06b.vmdk. {{(pid=63297) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1849.897980] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-275db02c-3e68-480c-9fd2-4650513ce178 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.905327] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1849.905327] env[63297]: value = "task-1698412" [ 1849.905327] env[63297]: _type = "Task" [ 1849.905327] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.913396] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698412, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.989039] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1849.989039] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1849.989296] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Deleting the datastore file [datastore1] d4b2da36-b0fd-47d2-95de-ef4b3f91330f {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1849.989453] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f87e4a2-bd34-4c29-9821-381c2dd5e171 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.997050] env[63297]: DEBUG oslo_vmware.api [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for the task: (returnval){ [ 1849.997050] env[63297]: value = "task-1698413" [ 1849.997050] env[63297]: _type = "Task" [ 1849.997050] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.003923] env[63297]: DEBUG oslo_vmware.api [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698413, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.415766] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698412, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.506582] env[63297]: DEBUG oslo_vmware.api [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698413, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.916322] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698412, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.008632] env[63297]: DEBUG oslo_vmware.api [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Task: {'id': task-1698413, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.823702} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.009055] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1851.009055] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1851.009287] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1851.009463] env[63297]: INFO nova.compute.manager [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Took 1.73 seconds to destroy the instance on the hypervisor. [ 1851.009792] env[63297]: DEBUG oslo.service.loopingcall [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1851.010025] env[63297]: DEBUG nova.compute.manager [-] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1851.010123] env[63297]: DEBUG nova.network.neutron [-] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1851.417038] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698412, 'name': MoveVirtualDisk_Task} progress is 69%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.513860] env[63297]: DEBUG nova.compute.manager [req-13727284-dfaf-48da-891b-f1c8fd7a1d15 req-30a9515d-0dd0-4f11-a225-82812733a615 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Received event network-vif-deleted-be020d1c-0688-4a14-b8d7-7d51dc77c7d5 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1851.513860] env[63297]: INFO nova.compute.manager [req-13727284-dfaf-48da-891b-f1c8fd7a1d15 req-30a9515d-0dd0-4f11-a225-82812733a615 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Neutron deleted interface be020d1c-0688-4a14-b8d7-7d51dc77c7d5; detaching it from the instance and deleting it from the info cache [ 1851.513860] env[63297]: DEBUG nova.network.neutron [req-13727284-dfaf-48da-891b-f1c8fd7a1d15 req-30a9515d-0dd0-4f11-a225-82812733a615 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.919430] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698412, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.983079] env[63297]: DEBUG nova.network.neutron [-] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.016433] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86a58ccf-511d-4a21-8255-039b2b476beb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.026167] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b6f6ae-a7dc-4798-8786-9afbf2b9a66d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.057043] env[63297]: DEBUG nova.compute.manager [req-13727284-dfaf-48da-891b-f1c8fd7a1d15 req-30a9515d-0dd0-4f11-a225-82812733a615 service nova] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Detach interface failed, port_id=be020d1c-0688-4a14-b8d7-7d51dc77c7d5, reason: Instance d4b2da36-b0fd-47d2-95de-ef4b3f91330f could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1852.417756] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698412, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.240131} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.418140] env[63297]: INFO nova.virt.vmwareapi.ds_util [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_9dcb1fba-1042-4173-861d-a5fd1e5137a7/OSTACK_IMG_9dcb1fba-1042-4173-861d-a5fd1e5137a7.vmdk to [datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b/2aadb7a8-891e-4a45-b2ae-792004e8b06b.vmdk. [ 1852.418194] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Cleaning up location [datastore1] OSTACK_IMG_9dcb1fba-1042-4173-861d-a5fd1e5137a7 {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1852.418362] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_9dcb1fba-1042-4173-861d-a5fd1e5137a7 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1852.418599] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ba0dc2e-c4e7-496c-9b36-ea2f51c21dee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.424751] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1852.424751] env[63297]: value = "task-1698414" [ 1852.424751] env[63297]: _type = "Task" [ 1852.424751] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.432010] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698414, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.485888] env[63297]: INFO nova.compute.manager [-] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Took 1.48 seconds to deallocate network for instance. [ 1852.934888] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698414, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09379} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.935156] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1852.935325] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b/2aadb7a8-891e-4a45-b2ae-792004e8b06b.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.935571] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b/2aadb7a8-891e-4a45-b2ae-792004e8b06b.vmdk to [datastore1] d7db24c1-35db-46d5-a406-fbb8c1c5d158/d7db24c1-35db-46d5-a406-fbb8c1c5d158.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1852.935839] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92573098-f17b-4dec-a06e-fc910985dd82 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.943939] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1852.943939] env[63297]: value = "task-1698415" [ 1852.943939] env[63297]: _type = "Task" [ 1852.943939] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.952564] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698415, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.992522] env[63297]: DEBUG oslo_concurrency.lockutils [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.992775] env[63297]: DEBUG oslo_concurrency.lockutils [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.993022] env[63297]: DEBUG nova.objects.instance [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lazy-loading 'resources' on Instance uuid d4b2da36-b0fd-47d2-95de-ef4b3f91330f {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1853.454878] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698415, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.600804] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0236c58-39ce-4703-bc99-0dc62b890000 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.608060] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8101908b-fd45-44ba-b6d5-f26827a1efc8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.638667] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24867f36-eb85-47aa-90ec-1766e9a8a4d2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.646667] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99936156-c567-4f57-993a-d1e9009c429d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.660637] env[63297]: DEBUG nova.compute.provider_tree [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1853.958389] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698415, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.163954] env[63297]: DEBUG nova.scheduler.client.report [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1854.456363] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698415, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.669829] env[63297]: DEBUG oslo_concurrency.lockutils [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.677s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.693909] env[63297]: INFO nova.scheduler.client.report [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Deleted allocations for instance d4b2da36-b0fd-47d2-95de-ef4b3f91330f [ 1854.956622] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698415, 'name': CopyVirtualDisk_Task} progress is 60%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.202994] env[63297]: DEBUG oslo_concurrency.lockutils [None req-875eb89a-19e2-46b3-ab08-bb244dff58f5 tempest-AttachVolumeNegativeTest-184052581 tempest-AttachVolumeNegativeTest-184052581-project-member] Lock "d4b2da36-b0fd-47d2-95de-ef4b3f91330f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.932s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.269971] env[63297]: DEBUG oslo_concurrency.lockutils [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "1316bf99-cc93-4d1a-b31c-000dac095b3e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.270260] env[63297]: DEBUG oslo_concurrency.lockutils [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.270441] env[63297]: INFO nova.compute.manager [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Shelving [ 1855.457430] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698415, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.778073] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1855.778363] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9dbe16ff-b206-4a74-805c-4a7b8451ed64 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.785453] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1855.785453] env[63297]: value = "task-1698417" [ 1855.785453] env[63297]: _type = "Task" [ 1855.785453] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.793858] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698417, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.958043] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698415, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.861552} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.958965] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2aadb7a8-891e-4a45-b2ae-792004e8b06b/2aadb7a8-891e-4a45-b2ae-792004e8b06b.vmdk to [datastore1] d7db24c1-35db-46d5-a406-fbb8c1c5d158/d7db24c1-35db-46d5-a406-fbb8c1c5d158.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1855.959597] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be845a0b-b378-499e-9435-6c729252d479 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.982636] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] d7db24c1-35db-46d5-a406-fbb8c1c5d158/d7db24c1-35db-46d5-a406-fbb8c1c5d158.vmdk or device None with type streamOptimized {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1855.982937] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e7c3c88-97a9-4909-8dbf-cc994e9865f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.002934] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1856.002934] env[63297]: value = "task-1698418" [ 1856.002934] env[63297]: _type = "Task" [ 1856.002934] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.011188] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698418, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.295584] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698417, 'name': PowerOffVM_Task, 'duration_secs': 0.203642} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.295843] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1856.296610] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709c89eb-368a-4f88-b5ee-f8b2a45193d6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.314735] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31fb1c6-052f-4631-ac60-e60910eecbdf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.513018] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698418, 'name': ReconfigVM_Task, 'duration_secs': 0.278377} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.513317] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Reconfigured VM instance instance-00000071 to attach disk [datastore1] d7db24c1-35db-46d5-a406-fbb8c1c5d158/d7db24c1-35db-46d5-a406-fbb8c1c5d158.vmdk or device None with type streamOptimized {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1856.514167] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cae17c62-f1ab-4821-b3fa-296a83d73e28 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.520446] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1856.520446] env[63297]: value = "task-1698419" [ 1856.520446] env[63297]: _type = "Task" [ 1856.520446] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.529389] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698419, 'name': Rename_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.824511] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Creating Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1856.824799] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c6dd0f46-3742-45a8-9c60-930d56b720ea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.832999] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1856.832999] env[63297]: value = "task-1698420" [ 1856.832999] env[63297]: _type = "Task" [ 1856.832999] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.841645] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698420, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.950483] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.950757] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.950979] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.951189] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.951362] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7133659-3e49-452b-8c34-3269665a262c 
tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.953509] env[63297]: INFO nova.compute.manager [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Terminating instance [ 1856.955311] env[63297]: DEBUG nova.compute.manager [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1856.955556] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1856.955797] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73f6d5d7-2dd1-4cec-81cd-0acfe7fb31f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.962680] env[63297]: DEBUG oslo_vmware.api [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1856.962680] env[63297]: value = "task-1698421" [ 1856.962680] env[63297]: _type = "Task" [ 1856.962680] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.971191] env[63297]: DEBUG oslo_vmware.api [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698421, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.031121] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698419, 'name': Rename_Task, 'duration_secs': 0.178414} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.031453] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1857.031746] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf61958a-1458-4c58-ac83-a96a84a14e35 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.038792] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1857.038792] env[63297]: value = "task-1698422" [ 1857.038792] env[63297]: _type = "Task" [ 1857.038792] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.047096] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698422, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.342727] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698420, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.472148] env[63297]: DEBUG oslo_vmware.api [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698421, 'name': PowerOffVM_Task, 'duration_secs': 0.20175} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.472349] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1857.472545] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Volume detach. 
Driver type: vmdk {{(pid=63297) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1857.472732] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354025', 'volume_id': 'a946ca63-9887-4bf6-9a42-97242baeac81', 'name': 'volume-a946ca63-9887-4bf6-9a42-97242baeac81', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'b6a1f66d-783e-4263-b9c4-a4d517ce2923', 'attached_at': '2024-12-10T17:32:52.000000', 'detached_at': '', 'volume_id': 'a946ca63-9887-4bf6-9a42-97242baeac81', 'serial': 'a946ca63-9887-4bf6-9a42-97242baeac81'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1857.473502] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee597a6-b896-4366-b545-e1d076b40f71 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.491508] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa9a69b-a98b-4d75-9342-bf00626adf7f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.498369] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b399a666-5c8f-4073-8e94-ccbd8b01d2e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.515584] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bda0c2-428d-4d1e-b18b-f199ff60dbac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.530231] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] The volume has not been displaced from its original location: [datastore1] volume-a946ca63-9887-4bf6-9a42-97242baeac81/volume-a946ca63-9887-4bf6-9a42-97242baeac81.vmdk. No consolidation needed. {{(pid=63297) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1857.535699] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1857.535993] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-219fadab-33e4-4a3d-8ef4-baabb51b0703 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.561134] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698422, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.562731] env[63297]: DEBUG oslo_vmware.api [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1857.562731] env[63297]: value = "task-1698423" [ 1857.562731] env[63297]: _type = "Task" [ 1857.562731] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.572552] env[63297]: DEBUG oslo_vmware.api [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698423, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.843985] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698420, 'name': CreateSnapshot_Task, 'duration_secs': 0.881927} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.844266] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Created Snapshot of the VM instance {{(pid=63297) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1857.844991] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e2e4ed-c522-4791-8bf4-da5f0759c5e2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.059332] env[63297]: DEBUG oslo_vmware.api [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698422, 'name': PowerOnVM_Task, 'duration_secs': 0.643955} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.059527] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1858.071230] env[63297]: DEBUG oslo_vmware.api [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698423, 'name': ReconfigVM_Task, 'duration_secs': 0.165905} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.071473] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1858.076138] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35824190-599b-4c84-abec-4075f8e26723 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.090979] env[63297]: DEBUG oslo_vmware.api [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1858.090979] env[63297]: value = "task-1698424" [ 1858.090979] env[63297]: _type = "Task" [ 1858.090979] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.098521] env[63297]: DEBUG oslo_vmware.api [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698424, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.165383] env[63297]: DEBUG nova.compute.manager [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1858.166338] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5a6d94-c9b1-4a89-8a17-eb3c133cd17b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.362134] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Creating linked-clone VM from snapshot {{(pid=63297) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1858.362438] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f99aa1ba-ba58-430e-b3fe-253304d8bfc8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.370677] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1858.370677] env[63297]: value = "task-1698425" [ 1858.370677] env[63297]: _type = "Task" [ 1858.370677] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.379092] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698425, 'name': CloneVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.600987] env[63297]: DEBUG oslo_vmware.api [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698424, 'name': ReconfigVM_Task, 'duration_secs': 0.384965} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.601450] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-354025', 'volume_id': 'a946ca63-9887-4bf6-9a42-97242baeac81', 'name': 'volume-a946ca63-9887-4bf6-9a42-97242baeac81', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'b6a1f66d-783e-4263-b9c4-a4d517ce2923', 'attached_at': '2024-12-10T17:32:52.000000', 'detached_at': '', 'volume_id': 'a946ca63-9887-4bf6-9a42-97242baeac81', 'serial': 'a946ca63-9887-4bf6-9a42-97242baeac81'} {{(pid=63297) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1858.601672] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1858.602371] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8edc10d5-9ec4-485b-8b58-f006b26958a2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.608644] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1858.608862] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-013cf254-fd69-4f15-9d63-89e8bfaf1b6f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.682752] env[63297]: DEBUG oslo_concurrency.lockutils [None req-900871b3-304f-4e3d-b8c2-626801015655 tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 23.201s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.744181] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None 
req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1858.744377] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1858.744538] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleting the datastore file [datastore1] b6a1f66d-783e-4263-b9c4-a4d517ce2923 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1858.744827] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-491ed9b4-5e8d-4be9-a23a-676339a096b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.751248] env[63297]: DEBUG oslo_vmware.api [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1858.751248] env[63297]: value = "task-1698427" [ 1858.751248] env[63297]: _type = "Task" [ 1858.751248] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.759441] env[63297]: DEBUG oslo_vmware.api [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698427, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.882614] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698425, 'name': CloneVM_Task} progress is 94%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.261287] env[63297]: DEBUG oslo_vmware.api [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698427, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094777} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.261531] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1859.261727] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1859.261901] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1859.262090] env[63297]: INFO nova.compute.manager [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Took 2.31 seconds to destroy the instance on the hypervisor. [ 1859.262335] env[63297]: DEBUG oslo.service.loopingcall [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1859.262525] env[63297]: DEBUG nova.compute.manager [-] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1859.262619] env[63297]: DEBUG nova.network.neutron [-] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1859.380778] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698425, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.729336] env[63297]: DEBUG nova.compute.manager [req-fde050b0-3052-4404-8c56-1e0e37a40a1f req-e4632463-7880-4aab-ae93-87ae84975574 service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Received event network-vif-deleted-653160f6-b302-49dd-8655-4703b2ac1c6c {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1859.729764] env[63297]: INFO nova.compute.manager [req-fde050b0-3052-4404-8c56-1e0e37a40a1f req-e4632463-7880-4aab-ae93-87ae84975574 service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Neutron deleted interface 653160f6-b302-49dd-8655-4703b2ac1c6c; detaching it from the instance and deleting it from the info cache [ 1859.729764] env[63297]: DEBUG nova.network.neutron [req-fde050b0-3052-4404-8c56-1e0e37a40a1f req-e4632463-7880-4aab-ae93-87ae84975574 service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.885685] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698425, 'name': CloneVM_Task, 'duration_secs': 1.123656} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.885685] env[63297]: INFO nova.virt.vmwareapi.vmops [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Created linked-clone VM from snapshot [ 1859.885685] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c192bae9-354c-4dfe-9b0b-fe64731a59bc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.892949] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Uploading image 9334e852-eb03-4385-8cae-1fb5b4d5a3e5 {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1859.924169] env[63297]: DEBUG oslo_vmware.rw_handles [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1859.924169] env[63297]: value = "vm-354038" [ 1859.924169] env[63297]: _type = "VirtualMachine" [ 1859.924169] env[63297]: }. 
{{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1859.924455] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9cdb4f2e-fc9a-4df5-b47c-19d9fdae58a8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.935898] env[63297]: DEBUG oslo_vmware.rw_handles [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lease: (returnval){ [ 1859.935898] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c4ed88-ac6d-1653-568b-40f52ddce4d1" [ 1859.935898] env[63297]: _type = "HttpNfcLease" [ 1859.935898] env[63297]: } obtained for exporting VM: (result){ [ 1859.935898] env[63297]: value = "vm-354038" [ 1859.935898] env[63297]: _type = "VirtualMachine" [ 1859.935898] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1859.936320] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the lease: (returnval){ [ 1859.936320] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c4ed88-ac6d-1653-568b-40f52ddce4d1" [ 1859.936320] env[63297]: _type = "HttpNfcLease" [ 1859.936320] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1859.943652] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1859.943652] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c4ed88-ac6d-1653-568b-40f52ddce4d1" [ 1859.943652] env[63297]: _type = "HttpNfcLease" [ 1859.943652] env[63297]: } is initializing. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1860.202030] env[63297]: DEBUG nova.network.neutron [-] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.233710] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-582c3dd7-420c-4057-aa9b-1700e7746438 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.243867] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4abc45e-6834-4edb-8e55-4ff9dcda177a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.274789] env[63297]: DEBUG nova.compute.manager [req-fde050b0-3052-4404-8c56-1e0e37a40a1f req-e4632463-7880-4aab-ae93-87ae84975574 service nova] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Detach interface failed, port_id=653160f6-b302-49dd-8655-4703b2ac1c6c, reason: Instance b6a1f66d-783e-4263-b9c4-a4d517ce2923 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1860.444936] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1860.444936] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c4ed88-ac6d-1653-568b-40f52ddce4d1" [ 1860.444936] env[63297]: _type = "HttpNfcLease" [ 1860.444936] env[63297]: } is ready. 
{{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1860.444936] env[63297]: DEBUG oslo_vmware.rw_handles [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1860.444936] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c4ed88-ac6d-1653-568b-40f52ddce4d1" [ 1860.444936] env[63297]: _type = "HttpNfcLease" [ 1860.444936] env[63297]: }. {{(pid=63297) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1860.445781] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297e1c0a-cf04-4d4f-a6c1-a813bd2b4c45 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.454101] env[63297]: DEBUG oslo_vmware.rw_handles [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e40efb-43ef-f9e6-2c1c-9052be949ccc/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1860.454318] env[63297]: DEBUG oslo_vmware.rw_handles [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e40efb-43ef-f9e6-2c1c-9052be949ccc/disk-0.vmdk for reading. {{(pid=63297) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1860.546690] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e0e694a0-e7cd-4926-8b5f-d40659becaac {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.704714] env[63297]: INFO nova.compute.manager [-] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Took 1.44 seconds to deallocate network for instance. [ 1861.246327] env[63297]: INFO nova.compute.manager [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Took 0.54 seconds to detach 1 volumes for instance. 
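The lease handling recorded above (VirtualMachine.ExportVm, waiting for the HttpNfcLease to leave "initializing", then pulling the disk-0.vmdk URL out of the lease info) is the standard oslo.vmware export flow used by upload_image_stream_optimized. The sketch below is only an illustration of that sequence, not the driver's actual code: the vm_ref/session wiring and the export_vmdk_url helper are assumptions, while invoke_api, wait_for_lease_ready and vim_util.get_object_property are real oslo.vmware calls.

# Hedged sketch of the export-lease flow seen in the log records above.
from oslo_vmware import vim_util


def export_vmdk_url(session, vm_ref):
    """Return the HTTPS VMDK URL for an export lease on vm_ref.

    Mirrors the logged sequence: ExportVm -> wait for the HttpNfcLease to
    become ready -> read lease.info -> pick the disk deviceUrl.
    """
    # VirtualMachine.ExportVm returns an HttpNfcLease managed object.
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)

    # Blocks until the lease is ready (the "is initializing" / "is ready"
    # poll lines above come from this wait).
    session.wait_for_lease_ready(lease)

    # The lease info carries one deviceUrl per exported disk.
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    for device_url in lease_info.deviceUrl:
        if device_url.url.endswith('.vmdk'):
            return device_url.url
    raise RuntimeError('no VMDK deviceUrl found on export lease')

In the log, the URL returned by this step (https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/.../disk-0.vmdk) is then opened read-only by oslo_vmware.rw_handles and streamed to Glance, with HttpNfcLease.HttpNfcLeaseProgress calls keeping the lease alive during the transfer.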
[ 1861.249101] env[63297]: DEBUG nova.compute.manager [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Deleting volume: a946ca63-9887-4bf6-9a42-97242baeac81 {{(pid=63297) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1861.785222] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.785565] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.785856] env[63297]: DEBUG nova.objects.instance [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lazy-loading 'resources' on Instance uuid b6a1f66d-783e-4263-b9c4-a4d517ce2923 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1862.383381] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080121f8-503e-44b3-86f0-00934c9dbd7e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.392365] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891d7728-10dc-4683-96be-1502b19e0c34 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.423813] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f35411-a4f6-449e-aff5-0a79d3738b25 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.431445] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034aaaf3-bac2-4e79-8085-0f0031792ec1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.446368] env[63297]: DEBUG nova.compute.provider_tree [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1862.951041] env[63297]: DEBUG nova.scheduler.client.report [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1863.456953] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.476371] env[63297]: INFO nova.scheduler.client.report [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted allocations for instance b6a1f66d-783e-4263-b9c4-a4d517ce2923 [ 1863.984118] env[63297]: DEBUG oslo_concurrency.lockutils [None req-c7133659-3e49-452b-8c34-3269665a262c tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "b6a1f66d-783e-4263-b9c4-a4d517ce2923" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.033s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.710557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.710557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.710557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.710557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.710557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock 
"c257ee27-8d87-4fe6-a953-cc4af1ec36d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.712852] env[63297]: INFO nova.compute.manager [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Terminating instance [ 1864.714327] env[63297]: DEBUG nova.compute.manager [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1864.714530] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1864.715407] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fa186e-7a67-4a8b-8f2f-0e24d2ce695b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.723920] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1864.723920] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-934ee692-8597-4faf-841d-624013222213 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.732145] env[63297]: DEBUG oslo_vmware.api [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1864.732145] env[63297]: value = "task-1698432" [ 1864.732145] env[63297]: _type = "Task" [ 1864.732145] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.741587] env[63297]: DEBUG oslo_vmware.api [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698432, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.242459] env[63297]: DEBUG oslo_vmware.api [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698432, 'name': PowerOffVM_Task, 'duration_secs': 0.297334} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.242713] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1865.242882] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1865.243154] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec006465-e7eb-4f96-bd1b-026c81f3037d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.402905] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1865.403124] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1865.403317] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleting the datastore file [datastore1] c257ee27-8d87-4fe6-a953-cc4af1ec36d6 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1865.403588] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c56c1db-26f8-4fd5-ac75-cf02665973d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.409934] env[63297]: DEBUG oslo_vmware.api [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1865.409934] env[63297]: value = "task-1698434" [ 1865.409934] env[63297]: _type = "Task" [ 1865.409934] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.418414] env[63297]: DEBUG oslo_vmware.api [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698434, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.921166] env[63297]: DEBUG oslo_vmware.api [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698434, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212799} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.921556] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1865.921803] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1865.922143] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1865.922409] env[63297]: INFO nova.compute.manager [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1865.922678] env[63297]: DEBUG oslo.service.loopingcall [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1865.922874] env[63297]: DEBUG nova.compute.manager [-] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1865.922970] env[63297]: DEBUG nova.network.neutron [-] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1866.162502] env[63297]: DEBUG nova.compute.manager [req-e90fb07e-a07f-4552-b62c-7e6de251ac4d req-864c030f-2f12-4421-9498-0f37a9ec23ad service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Received event network-vif-deleted-a995a9af-3d84-43dd-8695-17446ea38fc8 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1866.162764] env[63297]: INFO nova.compute.manager [req-e90fb07e-a07f-4552-b62c-7e6de251ac4d req-864c030f-2f12-4421-9498-0f37a9ec23ad service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Neutron deleted interface a995a9af-3d84-43dd-8695-17446ea38fc8; detaching it from the instance and deleting it from the info cache [ 1866.163035] env[63297]: DEBUG nova.network.neutron [req-e90fb07e-a07f-4552-b62c-7e6de251ac4d req-864c030f-2f12-4421-9498-0f37a9ec23ad service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.638535] env[63297]: DEBUG nova.network.neutron [-] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.665905] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a02509b-79e5-4bdd-9b92-9c7578841598 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.675844] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ad03e5-65e1-4e76-9981-1fdd4b3882b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.704121] env[63297]: DEBUG nova.compute.manager [req-e90fb07e-a07f-4552-b62c-7e6de251ac4d req-864c030f-2f12-4421-9498-0f37a9ec23ad service nova] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Detach interface failed, port_id=a995a9af-3d84-43dd-8695-17446ea38fc8, reason: Instance c257ee27-8d87-4fe6-a953-cc4af1ec36d6 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1867.141809] env[63297]: INFO nova.compute.manager [-] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Took 1.22 seconds to deallocate network for instance. [ 1867.333132] env[63297]: DEBUG oslo_vmware.rw_handles [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e40efb-43ef-f9e6-2c1c-9052be949ccc/disk-0.vmdk. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1867.334015] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6cda2b-8cca-46e1-a306-5f5323453cb9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.340369] env[63297]: DEBUG oslo_vmware.rw_handles [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e40efb-43ef-f9e6-2c1c-9052be949ccc/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1867.340533] env[63297]: ERROR oslo_vmware.rw_handles [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e40efb-43ef-f9e6-2c1c-9052be949ccc/disk-0.vmdk due to incomplete transfer. [ 1867.340735] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9581c5e0-e1a6-4b88-a3ed-4b6afd634ff5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.347788] env[63297]: DEBUG oslo_vmware.rw_handles [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e40efb-43ef-f9e6-2c1c-9052be949ccc/disk-0.vmdk. {{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1867.347971] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Uploaded image 9334e852-eb03-4385-8cae-1fb5b4d5a3e5 to the Glance image server {{(pid=63297) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1867.350360] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Destroying the VM {{(pid=63297) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1867.350561] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-16e5c020-595c-4c50-b7dc-44f4456ebce2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.356371] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1867.356371] env[63297]: value = "task-1698435" [ 1867.356371] env[63297]: _type = "Task" [ 1867.356371] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.364981] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698435, 'name': Destroy_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.648090] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.648373] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.648599] env[63297]: DEBUG nova.objects.instance [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lazy-loading 'resources' on Instance uuid c257ee27-8d87-4fe6-a953-cc4af1ec36d6 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1867.865900] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698435, 'name': Destroy_Task, 'duration_secs': 0.352236} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.866208] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Destroyed the VM [ 1867.866387] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Deleting Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1867.866619] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1ee13225-87d0-48db-a606-9fed2a8a13e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.872967] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1867.872967] env[63297]: value = "task-1698436" [ 1867.872967] env[63297]: _type = "Task" [ 1867.872967] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.879905] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698436, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.231339] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b26c02-5c1a-4c5c-b0df-abda106d8fa4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.238868] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d5b050-7ac1-4bba-a2a4-8a057eff79c5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.268524] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc906d0f-ee60-485c-b894-7e939dd4532f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.275711] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e908bb1-b758-42d6-8a6b-fd02e7639a03 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.288143] env[63297]: DEBUG nova.compute.provider_tree [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1868.382153] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698436, 'name': RemoveSnapshot_Task, 'duration_secs': 0.369902} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.382421] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Deleted Snapshot of the VM instance {{(pid=63297) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1868.382683] env[63297]: DEBUG nova.compute.manager [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1868.383437] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f4acc3-c559-4053-bf4d-2561960984fa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.791887] env[63297]: DEBUG nova.scheduler.client.report [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1868.894780] env[63297]: INFO nova.compute.manager [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Shelve offloading [ 1868.896299] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1868.896539] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd7c590d-a432-471a-81d3-d257429a7f16 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.904570] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1868.904570] env[63297]: value = "task-1698437" [ 1868.904570] env[63297]: _type = "Task" [ 1868.904570] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.914471] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] VM already powered off {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1868.914648] env[63297]: DEBUG nova.compute.manager [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1868.915355] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c1a014-2cc0-4641-b5c8-f1e8755951ff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.920898] env[63297]: DEBUG oslo_concurrency.lockutils [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.921067] env[63297]: DEBUG oslo_concurrency.lockutils [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.921253] env[63297]: DEBUG nova.network.neutron [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1869.124954] env[63297]: DEBUG nova.objects.instance [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lazy-loading 'flavor' on Instance uuid 41d4118d-7621-4ac9-be2f-2664cd691180 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1869.296600] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.648s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.317068] env[63297]: INFO nova.scheduler.client.report [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted allocations for instance c257ee27-8d87-4fe6-a953-cc4af1ec36d6 [ 1869.629789] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquiring lock 
"refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.630419] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquired lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.676780] env[63297]: DEBUG nova.network.neutron [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updating instance_info_cache with network_info: [{"id": "22445506-b44b-4648-8c7b-164ab284edb9", "address": "fa:16:3e:f3:77:fe", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22445506-b4", "ovs_interfaceid": "22445506-b44b-4648-8c7b-164ab284edb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.824220] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8dd4c3a2-2499-4506-9d88-dff109e205f8 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "c257ee27-8d87-4fe6-a953-cc4af1ec36d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.114s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.990968] env[63297]: DEBUG nova.network.neutron [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1870.015311] env[63297]: DEBUG nova.compute.manager [req-5ebc169f-fccd-4ef7-8a9d-9a4f616994bc req-40280f5d-9113-4b1a-9a05-7286c43402f4 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Received event network-changed-199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1870.015525] env[63297]: DEBUG nova.compute.manager [req-5ebc169f-fccd-4ef7-8a9d-9a4f616994bc req-40280f5d-9113-4b1a-9a05-7286c43402f4 
service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Refreshing instance network info cache due to event network-changed-199fed1c-c158-4c42-85e0-83b3e9035230. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1870.015773] env[63297]: DEBUG oslo_concurrency.lockutils [req-5ebc169f-fccd-4ef7-8a9d-9a4f616994bc req-40280f5d-9113-4b1a-9a05-7286c43402f4 service nova] Acquiring lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.179204] env[63297]: DEBUG oslo_concurrency.lockutils [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.464882] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1870.465814] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9974247-6464-4271-aa69-ac7472fb5ac7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.475131] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1870.475131] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99ba8db4-885e-45f6-9c60-0bc3a272c5d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.551229] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1870.551482] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1870.551670] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleting the datastore file [datastore1] 1316bf99-cc93-4d1a-b31c-000dac095b3e {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1870.551942] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82d6c455-f5d6-44c1-85ef-ffce77703f67 {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.558063] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1870.558063] env[63297]: value = "task-1698440" [ 1870.558063] env[63297]: _type = "Task" [ 1870.558063] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.567969] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698440, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.727627] env[63297]: DEBUG nova.network.neutron [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updating instance_info_cache with network_info: [{"id": "199fed1c-c158-4c42-85e0-83b3e9035230", "address": "fa:16:3e:22:93:2a", "network": {"id": "88ab7e6e-b26f-40e8-a725-1d0f995a7758", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-216828050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8591e3bee13e474b88f592525d95a2e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199fed1c-c1", "ovs_interfaceid": "199fed1c-c158-4c42-85e0-83b3e9035230", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1871.067781] env[63297]: DEBUG oslo_vmware.api [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698440, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127604} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.068029] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1871.068220] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1871.068399] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1871.089965] env[63297]: INFO nova.scheduler.client.report [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleted allocations for instance 1316bf99-cc93-4d1a-b31c-000dac095b3e [ 1871.230726] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Releasing lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.230948] env[63297]: DEBUG nova.compute.manager [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Inject network info {{(pid=63297) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1871.231927] env[63297]: DEBUG nova.compute.manager [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] network_info to inject: |[{"id": "199fed1c-c158-4c42-85e0-83b3e9035230", "address": "fa:16:3e:22:93:2a", "network": {"id": "88ab7e6e-b26f-40e8-a725-1d0f995a7758", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-216828050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8591e3bee13e474b88f592525d95a2e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap199fed1c-c1", "ovs_interfaceid": "199fed1c-c158-4c42-85e0-83b3e9035230", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1871.236044] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Reconfiguring VM instance to set the machine id {{(pid=63297) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1871.236363] env[63297]: DEBUG oslo_concurrency.lockutils [req-5ebc169f-fccd-4ef7-8a9d-9a4f616994bc req-40280f5d-9113-4b1a-9a05-7286c43402f4 service nova] Acquired lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.236540] env[63297]: DEBUG nova.network.neutron [req-5ebc169f-fccd-4ef7-8a9d-9a4f616994bc req-40280f5d-9113-4b1a-9a05-7286c43402f4 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Refreshing network info cache for port 199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1871.237693] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b8dc7b1-651b-47e3-ae49-13ae014b60e1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.256375] env[63297]: DEBUG oslo_vmware.api [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for the task: (returnval){ [ 1871.256375] env[63297]: value = "task-1698441" [ 1871.256375] env[63297]: _type = "Task" [ 1871.256375] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.264437] env[63297]: DEBUG oslo_vmware.api [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698441, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.340043] env[63297]: DEBUG nova.objects.instance [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lazy-loading 'flavor' on Instance uuid 41d4118d-7621-4ac9-be2f-2664cd691180 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1871.594543] env[63297]: DEBUG oslo_concurrency.lockutils [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.594839] env[63297]: DEBUG oslo_concurrency.lockutils [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.595068] env[63297]: DEBUG nova.objects.instance [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'resources' on Instance uuid 1316bf99-cc93-4d1a-b31c-000dac095b3e {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1871.766066] env[63297]: DEBUG oslo_vmware.api [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698441, 'name': ReconfigVM_Task, 'duration_secs': 0.154535} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.766342] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b0ef92d4-5963-48bc-ae46-34d307bf2698 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Reconfigured VM instance to set the machine id {{(pid=63297) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1871.843968] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquiring lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1872.066309] env[63297]: DEBUG nova.compute.manager [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Received event network-vif-unplugged-22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1872.066309] env[63297]: DEBUG oslo_concurrency.lockutils [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] Acquiring lock "1316bf99-cc93-4d1a-b31c-000dac095b3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.066309] env[63297]: DEBUG oslo_concurrency.lockutils [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.066309] env[63297]: DEBUG oslo_concurrency.lockutils [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.066309] env[63297]: DEBUG nova.compute.manager [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] No waiting events found dispatching network-vif-unplugged-22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1872.066309] env[63297]: WARNING nova.compute.manager [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Received unexpected event network-vif-unplugged-22445506-b44b-4648-8c7b-164ab284edb9 for instance with vm_state shelved_offloaded and task_state unshelving. 
[ 1872.066309] env[63297]: DEBUG nova.compute.manager [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Received event network-changed-22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1872.066309] env[63297]: DEBUG nova.compute.manager [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Refreshing instance network info cache due to event network-changed-22445506-b44b-4648-8c7b-164ab284edb9. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1872.066309] env[63297]: DEBUG oslo_concurrency.lockutils [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] Acquiring lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1872.066309] env[63297]: DEBUG oslo_concurrency.lockutils [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] Acquired lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1872.066309] env[63297]: DEBUG nova.network.neutron [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Refreshing network info cache for port 22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1872.067439] env[63297]: DEBUG nova.network.neutron [req-5ebc169f-fccd-4ef7-8a9d-9a4f616994bc req-40280f5d-9113-4b1a-9a05-7286c43402f4 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updated VIF entry in instance network info cache for port 199fed1c-c158-4c42-85e0-83b3e9035230. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1872.071824] env[63297]: DEBUG nova.network.neutron [req-5ebc169f-fccd-4ef7-8a9d-9a4f616994bc req-40280f5d-9113-4b1a-9a05-7286c43402f4 service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updating instance_info_cache with network_info: [{"id": "199fed1c-c158-4c42-85e0-83b3e9035230", "address": "fa:16:3e:22:93:2a", "network": {"id": "88ab7e6e-b26f-40e8-a725-1d0f995a7758", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-216828050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8591e3bee13e474b88f592525d95a2e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199fed1c-c1", "ovs_interfaceid": "199fed1c-c158-4c42-85e0-83b3e9035230", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.097564] env[63297]: DEBUG nova.objects.instance [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'numa_topology' on Instance uuid 1316bf99-cc93-4d1a-b31c-000dac095b3e {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1872.307844] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.308155] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.308370] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.308558] env[63297]: 
DEBUG oslo_concurrency.lockutils [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.308765] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.310895] env[63297]: INFO nova.compute.manager [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Terminating instance [ 1872.312446] env[63297]: DEBUG nova.compute.manager [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1872.312666] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1872.313523] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab54104-b9b5-4358-839c-42b752dfeafc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.321159] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1872.321381] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88d522ec-f12d-4d26-8ee3-495c0d57ed4a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.327191] env[63297]: DEBUG oslo_vmware.api [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1872.327191] env[63297]: value = "task-1698442" [ 1872.327191] env[63297]: _type = "Task" [ 1872.327191] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.336855] env[63297]: DEBUG oslo_vmware.api [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698442, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.573667] env[63297]: DEBUG oslo_concurrency.lockutils [req-5ebc169f-fccd-4ef7-8a9d-9a4f616994bc req-40280f5d-9113-4b1a-9a05-7286c43402f4 service nova] Releasing lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.574104] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquired lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1872.599970] env[63297]: DEBUG nova.objects.base [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Object Instance<1316bf99-cc93-4d1a-b31c-000dac095b3e> lazy-loaded attributes: resources,numa_topology {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1872.659224] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bac053c-2c10-4af3-bec1-bbde28b65601 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.667264] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69942c9d-8930-4f0d-a00e-a2231a3a4165 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.701794] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2df2085-b0a8-422e-addc-cf4e7d0a6eb0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.712768] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd351fb4-2d87-4f2e-934f-cd89e91a1f56 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.727438] env[63297]: DEBUG nova.compute.provider_tree [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1872.801121] env[63297]: DEBUG nova.network.neutron [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updated VIF entry in instance network info cache for port 22445506-b44b-4648-8c7b-164ab284edb9. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1872.801528] env[63297]: DEBUG nova.network.neutron [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updating instance_info_cache with network_info: [{"id": "22445506-b44b-4648-8c7b-164ab284edb9", "address": "fa:16:3e:f3:77:fe", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": null, "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap22445506-b4", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.836531] env[63297]: DEBUG oslo_vmware.api [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698442, 'name': PowerOffVM_Task, 'duration_secs': 0.182256} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.836787] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1872.836953] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1872.837291] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-637c7f4b-9928-4197-9e3c-7b3acc49af39 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.872516] env[63297]: DEBUG nova.network.neutron [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1872.899316] env[63297]: DEBUG nova.compute.manager [req-26b9ca08-d687-44d1-bec5-acfdf147bbcd req-fd5e6363-4a1b-43f8-b6c4-ba9f75cbe79a service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Received event network-changed-199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11132}} [ 1872.899539] env[63297]: DEBUG nova.compute.manager [req-26b9ca08-d687-44d1-bec5-acfdf147bbcd req-fd5e6363-4a1b-43f8-b6c4-ba9f75cbe79a service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Refreshing instance network info cache due to event network-changed-199fed1c-c158-4c42-85e0-83b3e9035230. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1872.899740] env[63297]: DEBUG oslo_concurrency.lockutils [req-26b9ca08-d687-44d1-bec5-acfdf147bbcd req-fd5e6363-4a1b-43f8-b6c4-ba9f75cbe79a service nova] Acquiring lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.208614] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "1316bf99-cc93-4d1a-b31c-000dac095b3e" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.230771] env[63297]: DEBUG nova.scheduler.client.report [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1873.304875] env[63297]: DEBUG oslo_concurrency.lockutils [req-178b536a-6c84-4864-a4a1-d4a48074aa71 req-983b383a-511c-4ea9-b997-23d7e691f6e2 service nova] Releasing lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1873.595164] env[63297]: DEBUG nova.network.neutron [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updating instance_info_cache with network_info: [{"id": "199fed1c-c158-4c42-85e0-83b3e9035230", "address": "fa:16:3e:22:93:2a", "network": {"id": "88ab7e6e-b26f-40e8-a725-1d0f995a7758", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-216828050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8591e3bee13e474b88f592525d95a2e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": 
"nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199fed1c-c1", "ovs_interfaceid": "199fed1c-c158-4c42-85e0-83b3e9035230", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.735418] env[63297]: DEBUG oslo_concurrency.lockutils [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.140s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.097986] env[63297]: DEBUG oslo_concurrency.lockutils [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Releasing lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1874.098168] env[63297]: DEBUG nova.compute.manager [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Inject network info {{(pid=63297) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1874.098381] env[63297]: DEBUG nova.compute.manager [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] network_info to inject: |[{"id": "199fed1c-c158-4c42-85e0-83b3e9035230", "address": "fa:16:3e:22:93:2a", "network": {"id": "88ab7e6e-b26f-40e8-a725-1d0f995a7758", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-216828050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8591e3bee13e474b88f592525d95a2e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199fed1c-c1", "ovs_interfaceid": "199fed1c-c158-4c42-85e0-83b3e9035230", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1874.103286] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Reconfiguring VM instance to 
set the machine id {{(pid=63297) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1874.103604] env[63297]: DEBUG oslo_concurrency.lockutils [req-26b9ca08-d687-44d1-bec5-acfdf147bbcd req-fd5e6363-4a1b-43f8-b6c4-ba9f75cbe79a service nova] Acquired lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1874.103777] env[63297]: DEBUG nova.network.neutron [req-26b9ca08-d687-44d1-bec5-acfdf147bbcd req-fd5e6363-4a1b-43f8-b6c4-ba9f75cbe79a service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Refreshing network info cache for port 199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1874.104950] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca55f2b3-55ea-47cd-8305-4a6ceb630c78 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.124147] env[63297]: DEBUG oslo_vmware.api [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for the task: (returnval){ [ 1874.124147] env[63297]: value = "task-1698444" [ 1874.124147] env[63297]: _type = "Task" [ 1874.124147] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.132268] env[63297]: DEBUG oslo_vmware.api [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698444, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.245955] env[63297]: DEBUG oslo_concurrency.lockutils [None req-812555ca-21f3-4d2e-83ec-9e390365dbf3 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 18.975s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.246776] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.038s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.246981] env[63297]: INFO nova.compute.manager [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Unshelving [ 1874.371155] env[63297]: DEBUG nova.network.neutron [req-26b9ca08-d687-44d1-bec5-acfdf147bbcd req-fd5e6363-4a1b-43f8-b6c4-ba9f75cbe79a service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updated VIF entry in instance network info cache for port 199fed1c-c158-4c42-85e0-83b3e9035230. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1874.371536] env[63297]: DEBUG nova.network.neutron [req-26b9ca08-d687-44d1-bec5-acfdf147bbcd req-fd5e6363-4a1b-43f8-b6c4-ba9f75cbe79a service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updating instance_info_cache with network_info: [{"id": "199fed1c-c158-4c42-85e0-83b3e9035230", "address": "fa:16:3e:22:93:2a", "network": {"id": "88ab7e6e-b26f-40e8-a725-1d0f995a7758", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-216828050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8591e3bee13e474b88f592525d95a2e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199fed1c-c1", "ovs_interfaceid": "199fed1c-c158-4c42-85e0-83b3e9035230", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.634434] env[63297]: DEBUG oslo_vmware.api [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698444, 'name': ReconfigVM_Task, 'duration_secs': 0.176973} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.634721] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-d6e04f8f-a12e-460d-9a25-a49b22cb6ea1 tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Reconfigured VM instance to set the machine id {{(pid=63297) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1874.874495] env[63297]: DEBUG oslo_concurrency.lockutils [req-26b9ca08-d687-44d1-bec5-acfdf147bbcd req-fd5e6363-4a1b-43f8-b6c4-ba9f75cbe79a service nova] Releasing lock "refresh_cache-41d4118d-7621-4ac9-be2f-2664cd691180" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1875.007851] env[63297]: DEBUG oslo_concurrency.lockutils [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquiring lock "41d4118d-7621-4ac9-be2f-2664cd691180" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.008169] env[63297]: DEBUG oslo_concurrency.lockutils [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lock "41d4118d-7621-4ac9-be2f-2664cd691180" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.008362] env[63297]: DEBUG oslo_concurrency.lockutils [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquiring lock "41d4118d-7621-4ac9-be2f-2664cd691180-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.008557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lock "41d4118d-7621-4ac9-be2f-2664cd691180-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.008729] env[63297]: DEBUG oslo_concurrency.lockutils [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lock "41d4118d-7621-4ac9-be2f-2664cd691180-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.011015] env[63297]: INFO nova.compute.manager [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Terminating instance [ 1875.012959] env[63297]: DEBUG nova.compute.manager 
[None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1875.013170] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1875.013990] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6bd406-6b3a-49cc-a79d-6a1275215a3e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.021876] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1875.022105] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c568ae7c-06f3-4a9b-8d3f-1191043449ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.028277] env[63297]: DEBUG oslo_vmware.api [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for the task: (returnval){ [ 1875.028277] env[63297]: value = "task-1698445" [ 1875.028277] env[63297]: _type = "Task" [ 1875.028277] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.036594] env[63297]: DEBUG oslo_vmware.api [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698445, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.271929] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.272252] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.272470] env[63297]: DEBUG nova.objects.instance [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'pci_requests' on Instance uuid 1316bf99-cc93-4d1a-b31c-000dac095b3e {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1875.538595] env[63297]: DEBUG oslo_vmware.api [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698445, 'name': PowerOffVM_Task, 'duration_secs': 0.186576} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.538873] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1875.539054] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1875.539538] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b24bccc6-2849-4007-a9a2-873661e79ed5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.776744] env[63297]: DEBUG nova.objects.instance [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'numa_topology' on Instance uuid 1316bf99-cc93-4d1a-b31c-000dac095b3e {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1876.281729] env[63297]: INFO nova.compute.claims [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1877.348603] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1ea57d2e-faf5-426e-a7c2-de48e5755cb1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.355901] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19ed632-eeba-47d8-833f-ac98037ebfc3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.384843] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1414e2d1-5270-46c7-8595-72546bbe2ea6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.391447] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb3823a-0a4a-43eb-9205-4e89f23b0e53 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.405162] env[63297]: DEBUG nova.compute.provider_tree [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1877.908733] env[63297]: DEBUG nova.scheduler.client.report [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1878.413677] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.141s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.440950] env[63297]: INFO nova.network.neutron [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updating port 22445506-b44b-4648-8c7b-164ab284edb9 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1878.993700] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1878.993929] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Deleting 
contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1878.994201] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleting the datastore file [datastore1] fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1878.994428] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c518821-d563-437a-a923-72b7339e19cd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.997594] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1878.997849] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1878.998054] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Deleting the datastore file [datastore1] 41d4118d-7621-4ac9-be2f-2664cd691180 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1878.998284] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-413935c9-7ebf-4b3f-9c29-735c79fc134f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.001787] env[63297]: DEBUG oslo_vmware.api [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for the task: (returnval){ [ 1879.001787] env[63297]: value = "task-1698447" [ 1879.001787] env[63297]: _type = "Task" [ 1879.001787] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.005592] env[63297]: DEBUG oslo_vmware.api [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for the task: (returnval){ [ 1879.005592] env[63297]: value = "task-1698448" [ 1879.005592] env[63297]: _type = "Task" [ 1879.005592] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.011478] env[63297]: DEBUG oslo_vmware.api [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698447, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.016191] env[63297]: DEBUG oslo_vmware.api [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698448, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.514625] env[63297]: DEBUG oslo_vmware.api [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Task: {'id': task-1698448, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128624} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.517541] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1879.517735] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1879.517920] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1879.518111] env[63297]: INFO nova.compute.manager [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Took 4.50 seconds to destroy the instance on the hypervisor. [ 1879.518345] env[63297]: DEBUG oslo.service.loopingcall [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1879.518544] env[63297]: DEBUG oslo_vmware.api [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Task: {'id': task-1698447, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137417} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.518741] env[63297]: DEBUG nova.compute.manager [-] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1879.518839] env[63297]: DEBUG nova.network.neutron [-] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1879.520371] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1879.520553] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1879.520724] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1879.520916] env[63297]: INFO nova.compute.manager [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Took 7.21 seconds to destroy the instance on the hypervisor. [ 1879.521149] env[63297]: DEBUG oslo.service.loopingcall [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1879.521330] env[63297]: DEBUG nova.compute.manager [-] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1879.521419] env[63297]: DEBUG nova.network.neutron [-] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1879.873641] env[63297]: DEBUG nova.compute.manager [req-7502b8ab-6a26-44d2-8d39-357860c551b5 req-277fd8a4-87d6-4a94-bb7b-6271bea17f79 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Received event network-vif-deleted-1952432a-7339-4c5d-80fc-5dac66b659e2 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1879.873882] env[63297]: INFO nova.compute.manager [req-7502b8ab-6a26-44d2-8d39-357860c551b5 req-277fd8a4-87d6-4a94-bb7b-6271bea17f79 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Neutron deleted interface 1952432a-7339-4c5d-80fc-5dac66b659e2; detaching it from the instance and deleting it from the info cache [ 1879.874018] env[63297]: DEBUG nova.network.neutron [req-7502b8ab-6a26-44d2-8d39-357860c551b5 req-277fd8a4-87d6-4a94-bb7b-6271bea17f79 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.997748] env[63297]: DEBUG nova.compute.manager [req-86e31ec0-7898-49b2-877e-d93d22748d45 req-948ba29f-0eba-4a4f-a7c3-988fbe00c50f service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Received event network-vif-deleted-199fed1c-c158-4c42-85e0-83b3e9035230 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1879.997940] env[63297]: INFO nova.compute.manager [req-86e31ec0-7898-49b2-877e-d93d22748d45 req-948ba29f-0eba-4a4f-a7c3-988fbe00c50f service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Neutron deleted interface 199fed1c-c158-4c42-85e0-83b3e9035230; detaching it from the instance and deleting it from the info cache [ 1879.998140] env[63297]: DEBUG nova.network.neutron [req-86e31ec0-7898-49b2-877e-d93d22748d45 req-948ba29f-0eba-4a4f-a7c3-988fbe00c50f service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.006238] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.006396] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.007230] env[63297]: DEBUG nova.network.neutron [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 
tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1880.347079] env[63297]: DEBUG nova.network.neutron [-] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.377038] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c00a4fb-4e55-4e8d-ade0-b6283b228524 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.386426] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6972264-f966-434f-9a6c-43807389679a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.411767] env[63297]: DEBUG nova.compute.manager [req-7502b8ab-6a26-44d2-8d39-357860c551b5 req-277fd8a4-87d6-4a94-bb7b-6271bea17f79 service nova] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Detach interface failed, port_id=1952432a-7339-4c5d-80fc-5dac66b659e2, reason: Instance fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1880.478938] env[63297]: DEBUG nova.network.neutron [-] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.501953] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd545704-7e26-4db4-a8c5-737cf7e262b6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.511864] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61d7acd-8337-47e7-9d04-1d89fdb1c02c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.539064] env[63297]: DEBUG nova.compute.manager [req-86e31ec0-7898-49b2-877e-d93d22748d45 req-948ba29f-0eba-4a4f-a7c3-988fbe00c50f service nova] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Detach interface failed, port_id=199fed1c-c158-4c42-85e0-83b3e9035230, reason: Instance 41d4118d-7621-4ac9-be2f-2664cd691180 could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1880.729512] env[63297]: DEBUG nova.network.neutron [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updating instance_info_cache with network_info: [{"id": "22445506-b44b-4648-8c7b-164ab284edb9", "address": "fa:16:3e:f3:77:fe", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22445506-b4", "ovs_interfaceid": "22445506-b44b-4648-8c7b-164ab284edb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.850084] env[63297]: INFO nova.compute.manager [-] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Took 1.33 seconds to deallocate network for instance. [ 1880.981744] env[63297]: INFO nova.compute.manager [-] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Took 1.46 seconds to deallocate network for instance. 
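The repeated "Acquiring lock ... by ..." / "acquired ... :: waited Ns" / ""released" ... :: held Ns" triplets in the trace above are emitted by oslo.concurrency's synchronized helper, which Nova wraps around per-instance and resource-tracker critical sections (the log names the wrapped inner functions directly, e.g. terminate_instance.<locals>.do_terminate_instance). The following is a minimal sketch of that pattern only, not Nova's actual code; it assumes oslo.concurrency is installed and the lock body is a placeholder:

from oslo_concurrency import lockutils


def terminate_instance(instance_uuid):
    # Decorating the inner function with a per-instance lock is what yields
    # log lines of the form:
    #   Lock "<uuid>" acquired by "...do_terminate_instance" :: waited 0.001s
    #   Lock "<uuid>" "released" by "...do_terminate_instance" :: held 4.5s
    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        # placeholder for the real work: power off and unregister the VM,
        # delete its datastore files, then deallocate networking
        pass

    do_terminate_instance()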
[ 1881.231876] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.258283] env[63297]: DEBUG nova.virt.hardware [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='a6e5f0a60482438c60dcbf7b4753cc09',container_format='bare',created_at=2024-12-10T17:33:28Z,direct_url=,disk_format='vmdk',id=9334e852-eb03-4385-8cae-1fb5b4d5a3e5,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-737764325-shelved',owner='6e3dcd98ebe94a75a94322b03feba3b4',properties=ImageMetaProps,protected=,size=31663616,status='active',tags=,updated_at=2024-12-10T17:33:41Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1881.258518] env[63297]: DEBUG nova.virt.hardware [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1881.258671] env[63297]: DEBUG nova.virt.hardware [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1881.258848] env[63297]: DEBUG nova.virt.hardware [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1881.258994] env[63297]: DEBUG nova.virt.hardware [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1881.259154] env[63297]: DEBUG nova.virt.hardware [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1881.259358] env[63297]: DEBUG nova.virt.hardware [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1881.259511] env[63297]: DEBUG nova.virt.hardware [None 
req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1881.259673] env[63297]: DEBUG nova.virt.hardware [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1881.259883] env[63297]: DEBUG nova.virt.hardware [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1881.260093] env[63297]: DEBUG nova.virt.hardware [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1881.260970] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33870f0e-1b22-4053-87dd-93b571628bf4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.268954] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15907e0-bb88-409f-825c-441d82bbcf6a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.282053] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:77:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e55c248-c504-4c7a-bbe9-f42cf417aee7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22445506-b44b-4648-8c7b-164ab284edb9', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1881.289383] env[63297]: DEBUG oslo.service.loopingcall [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1881.289587] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1881.289779] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d3ce5eb-b081-408f-b3d9-937fac267a1d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.307571] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1881.307571] env[63297]: value = "task-1698449" [ 1881.307571] env[63297]: _type = "Task" [ 1881.307571] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.314852] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698449, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.356078] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.356325] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.356577] env[63297]: DEBUG nova.objects.instance [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lazy-loading 'resources' on Instance uuid fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1881.488080] env[63297]: DEBUG oslo_concurrency.lockutils [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.817316] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698449, 'name': CreateVM_Task, 'duration_secs': 0.385099} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.817571] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1881.818206] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.818388] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.818778] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1881.819051] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52cb3813-a661-4e54-ac43-6b2139152c28 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.823457] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1881.823457] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]526de180-6fc4-4adc-93c6-9395f1133b39" [ 1881.823457] env[63297]: _type = "Task" [ 1881.823457] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.830639] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]526de180-6fc4-4adc-93c6-9395f1133b39, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.900994] env[63297]: DEBUG nova.compute.manager [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Received event network-vif-plugged-22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1881.901114] env[63297]: DEBUG oslo_concurrency.lockutils [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] Acquiring lock "1316bf99-cc93-4d1a-b31c-000dac095b3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.901324] env[63297]: DEBUG oslo_concurrency.lockutils [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.901929] env[63297]: DEBUG oslo_concurrency.lockutils [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.901929] env[63297]: DEBUG nova.compute.manager [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] No waiting events found dispatching network-vif-plugged-22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1881.901929] env[63297]: WARNING nova.compute.manager [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Received unexpected event network-vif-plugged-22445506-b44b-4648-8c7b-164ab284edb9 for instance with vm_state shelved_offloaded and task_state spawning. [ 1881.901929] env[63297]: DEBUG nova.compute.manager [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Received event network-changed-22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1881.902231] env[63297]: DEBUG nova.compute.manager [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Refreshing instance network info cache due to event network-changed-22445506-b44b-4648-8c7b-164ab284edb9. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1881.902231] env[63297]: DEBUG oslo_concurrency.lockutils [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] Acquiring lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.902456] env[63297]: DEBUG oslo_concurrency.lockutils [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] Acquired lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.902540] env[63297]: DEBUG nova.network.neutron [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Refreshing network info cache for port 22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1881.927691] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f9929c-8345-4223-954a-4111d0e90ddf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.936848] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b21295e-a084-4a30-ab61-f09f3a1944d1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.966979] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6fb450-d4f8-4c71-9c8d-1f1d789960c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.975085] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962f901d-e0da-4f4c-b62d-a1656aecc9b4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.988305] env[63297]: DEBUG nova.compute.provider_tree [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1882.333718] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.333926] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] 
[instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Processing image 9334e852-eb03-4385-8cae-1fb5b4d5a3e5 {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1882.334188] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5/9334e852-eb03-4385-8cae-1fb5b4d5a3e5.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.334338] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5/9334e852-eb03-4385-8cae-1fb5b4d5a3e5.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.334511] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1882.334780] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a80f977-6ee3-45f6-b23a-cf64f989e6e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.342703] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1882.342872] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1882.343556] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77ccdc7a-63a4-4a4a-9809-cea38c3594f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.348158] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1882.348158] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5233cf1d-48cf-3be4-d674-b44f1f6a190d" [ 1882.348158] env[63297]: _type = "Task" [ 1882.348158] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.355789] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5233cf1d-48cf-3be4-d674-b44f1f6a190d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.508996] env[63297]: ERROR nova.scheduler.client.report [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] [req-2d03cdf1-c7b3-4796-a682-6ed2c664d6e4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2d03cdf1-c7b3-4796-a682-6ed2c664d6e4"}]} [ 1882.525887] env[63297]: DEBUG nova.scheduler.client.report [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1882.539044] env[63297]: DEBUG nova.scheduler.client.report [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1882.540198] env[63297]: DEBUG nova.compute.provider_tree [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1882.548833] env[63297]: DEBUG nova.scheduler.client.report [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1882.566970] env[63297]: DEBUG nova.scheduler.client.report [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 
tempest-ServerActionsTestOtherA-1231642848-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1882.616918] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726fa228-f7a2-4854-9134-200d116e2455 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.621983] env[63297]: DEBUG nova.network.neutron [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updated VIF entry in instance network info cache for port 22445506-b44b-4648-8c7b-164ab284edb9. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1882.622312] env[63297]: DEBUG nova.network.neutron [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updating instance_info_cache with network_info: [{"id": "22445506-b44b-4648-8c7b-164ab284edb9", "address": "fa:16:3e:f3:77:fe", "network": {"id": "623ab0c2-8f3d-48fb-949e-23c51a946810", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-393425129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e3dcd98ebe94a75a94322b03feba3b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22445506-b4", "ovs_interfaceid": "22445506-b44b-4648-8c7b-164ab284edb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.628364] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a993e89e-a756-4186-b437-c453f8546b41 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.658101] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd1471f-3d11-4658-813a-047673024f9b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.665201] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576d5f9e-b0c7-4363-8697-1d810e254a82 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.677931] env[63297]: DEBUG nova.compute.provider_tree [None 
req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1882.858021] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Preparing fetch location {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1882.858382] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Fetch image to [datastore1] OSTACK_IMG_b68313da-28bd-458e-8236-7af842a5b015/OSTACK_IMG_b68313da-28bd-458e-8236-7af842a5b015.vmdk {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1882.858472] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Downloading stream optimized image 9334e852-eb03-4385-8cae-1fb5b4d5a3e5 to [datastore1] OSTACK_IMG_b68313da-28bd-458e-8236-7af842a5b015/OSTACK_IMG_b68313da-28bd-458e-8236-7af842a5b015.vmdk on the data store datastore1 as vApp {{(pid=63297) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1882.858622] env[63297]: DEBUG nova.virt.vmwareapi.images [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Downloading image file data 9334e852-eb03-4385-8cae-1fb5b4d5a3e5 to the ESX as VM named 'OSTACK_IMG_b68313da-28bd-458e-8236-7af842a5b015' {{(pid=63297) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1882.928024] env[63297]: DEBUG oslo_vmware.rw_handles [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1882.928024] env[63297]: value = "resgroup-9" [ 1882.928024] env[63297]: _type = "ResourcePool" [ 1882.928024] env[63297]: }. 
{{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1882.928579] env[63297]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-0651fb2e-9249-42ee-9d5b-28932941290f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.948828] env[63297]: DEBUG oslo_vmware.rw_handles [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lease: (returnval){ [ 1882.948828] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52defdd3-411a-9c92-670e-0d7212b7c865" [ 1882.948828] env[63297]: _type = "HttpNfcLease" [ 1882.948828] env[63297]: } obtained for vApp import into resource pool (val){ [ 1882.948828] env[63297]: value = "resgroup-9" [ 1882.948828] env[63297]: _type = "ResourcePool" [ 1882.948828] env[63297]: }. {{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1882.949167] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the lease: (returnval){ [ 1882.949167] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52defdd3-411a-9c92-670e-0d7212b7c865" [ 1882.949167] env[63297]: _type = "HttpNfcLease" [ 1882.949167] env[63297]: } to be ready. {{(pid=63297) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1882.954877] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1882.954877] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52defdd3-411a-9c92-670e-0d7212b7c865" [ 1882.954877] env[63297]: _type = "HttpNfcLease" [ 1882.954877] env[63297]: } is initializing. 
{{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1883.125592] env[63297]: DEBUG oslo_concurrency.lockutils [req-b0554324-cb96-4cbc-a4a8-8f1d4cd10995 req-56d76c9f-1f1d-495f-a4be-0634f0ed023d service nova] Releasing lock "refresh_cache-1316bf99-cc93-4d1a-b31c-000dac095b3e" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.210053] env[63297]: DEBUG nova.scheduler.client.report [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 174 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1883.210355] env[63297]: DEBUG nova.compute.provider_tree [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 174 to 175 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1883.210537] env[63297]: DEBUG nova.compute.provider_tree [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1883.457298] env[63297]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1883.457298] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52defdd3-411a-9c92-670e-0d7212b7c865" [ 1883.457298] env[63297]: _type = "HttpNfcLease" [ 1883.457298] env[63297]: } is ready. {{(pid=63297) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1883.457609] env[63297]: DEBUG oslo_vmware.rw_handles [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1883.457609] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52defdd3-411a-9c92-670e-0d7212b7c865" [ 1883.457609] env[63297]: _type = "HttpNfcLease" [ 1883.457609] env[63297]: }. 
{{(pid=63297) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1883.458297] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ded4f0a-2529-437a-8fa4-9ab24e0aa07b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.465501] env[63297]: DEBUG oslo_vmware.rw_handles [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526e1043-99a7-13fc-ad63-999fbcbaf8e0/disk-0.vmdk from lease info. {{(pid=63297) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1883.465676] env[63297]: DEBUG oslo_vmware.rw_handles [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating HTTP connection to write to file with size = 31663616 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526e1043-99a7-13fc-ad63-999fbcbaf8e0/disk-0.vmdk. {{(pid=63297) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1883.529430] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ae0ef248-d18c-444d-bf67-6cad1961f99e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.715626] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.359s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.717841] env[63297]: DEBUG oslo_concurrency.lockutils [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.230s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.718114] env[63297]: DEBUG nova.objects.instance [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lazy-loading 'resources' on Instance uuid 41d4118d-7621-4ac9-be2f-2664cd691180 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1883.740370] env[63297]: INFO nova.scheduler.client.report [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Deleted allocations for instance fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69 [ 1884.250038] env[63297]: DEBUG oslo_concurrency.lockutils [None req-bf86e03f-760f-434c-b449-cdb914e45b41 tempest-ServerActionsTestOtherA-1231642848 tempest-ServerActionsTestOtherA-1231642848-project-member] Lock "fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.942s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.275861] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d8b18e-f1a6-4fdb-985f-0e04065fcbbc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.287277] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87eb242-90e3-4e2c-b902-4c35a0c2abff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.325633] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7484ea-1b8e-40a6-bf19-a4a7e5ae7bf5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.333962] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6064a26-6865-410a-b7d3-b306c37b1c58 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.349446] env[63297]: DEBUG nova.compute.provider_tree [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1884.638642] env[63297]: DEBUG oslo_vmware.rw_handles [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Completed reading data from the image iterator. {{(pid=63297) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1884.638982] env[63297]: DEBUG oslo_vmware.rw_handles [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526e1043-99a7-13fc-ad63-999fbcbaf8e0/disk-0.vmdk. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1884.639948] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0ac8e4-c6dd-4db5-af4b-4dc54e6eb9d9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.647375] env[63297]: DEBUG oslo_vmware.rw_handles [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526e1043-99a7-13fc-ad63-999fbcbaf8e0/disk-0.vmdk is in state: ready. {{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1884.647545] env[63297]: DEBUG oslo_vmware.rw_handles [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526e1043-99a7-13fc-ad63-999fbcbaf8e0/disk-0.vmdk. 
{{(pid=63297) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1884.647775] env[63297]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-a4dc6d2b-83fb-492e-a0bc-341349294509 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.841350] env[63297]: DEBUG oslo_vmware.rw_handles [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526e1043-99a7-13fc-ad63-999fbcbaf8e0/disk-0.vmdk. {{(pid=63297) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1884.841711] env[63297]: INFO nova.virt.vmwareapi.images [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Downloaded image file data 9334e852-eb03-4385-8cae-1fb5b4d5a3e5 [ 1884.842476] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6eecfa-b7c8-4add-ae30-92e5383bfbd3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.859914] env[63297]: DEBUG nova.scheduler.client.report [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1884.863348] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-707d6d25-c422-40f3-88b7-42f76465de97 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.890383] env[63297]: INFO nova.virt.vmwareapi.images [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] The imported VM was unregistered [ 1884.892843] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Caching image {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1884.893100] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Creating directory with path [datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5 {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1884.893608] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-004c4cc2-af6c-4ca8-bd8f-80c68cc9f2a0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.904024] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Created directory with path [datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5 {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1884.904270] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_b68313da-28bd-458e-8236-7af842a5b015/OSTACK_IMG_b68313da-28bd-458e-8236-7af842a5b015.vmdk to [datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5/9334e852-eb03-4385-8cae-1fb5b4d5a3e5.vmdk. {{(pid=63297) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1884.904521] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-ba741d40-6124-45cc-bad0-d55ae7f7cde2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.911469] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1884.911469] env[63297]: value = "task-1698452" [ 1884.911469] env[63297]: _type = "Task" [ 1884.911469] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.919476] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698452, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.367420] env[63297]: DEBUG oslo_concurrency.lockutils [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.649s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.400054] env[63297]: INFO nova.scheduler.client.report [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Deleted allocations for instance 41d4118d-7621-4ac9-be2f-2664cd691180 [ 1885.426288] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698452, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.909143] env[63297]: DEBUG oslo_concurrency.lockutils [None req-774d1485-3e00-4eb1-a551-b047e2b5179a tempest-AttachInterfacesUnderV243Test-1990803827 tempest-AttachInterfacesUnderV243Test-1990803827-project-member] Lock "41d4118d-7621-4ac9-be2f-2664cd691180" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.901s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.926728] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698452, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.422901] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698452, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.923834] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698452, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.427379] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698452, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.416371} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.427660] env[63297]: INFO nova.virt.vmwareapi.ds_util [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_b68313da-28bd-458e-8236-7af842a5b015/OSTACK_IMG_b68313da-28bd-458e-8236-7af842a5b015.vmdk to [datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5/9334e852-eb03-4385-8cae-1fb5b4d5a3e5.vmdk. 
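[editor's note] The records above show a MoveVirtualDisk_Task being created and then polled (wait_for_task / _poll_task) until it reports "completed successfully". As an annotation only, the sketch below shows the general poll-until-done shape of such a loop; it is a hypothetical, simplified illustration, not Nova's or oslo.vmware's actual code (Nova delegates this to oslo.vmware's session task-wait helper), and the TaskFailed/get_task_info names are invented for the example.

```python
# Illustrative sketch only: generic poll loop in the spirit of the
# wait_for_task/_poll_task records above. Names here (TaskFailed,
# get_task_info, poll_interval) are hypothetical, not from the log.
import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a task until it succeeds, fails, or times out.

    get_task_info is any callable returning an object with .state
    ('queued', 'running', 'success' or 'error'), .progress and .error.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise TaskFailed(info.error)
        # Corresponds to records like
        # "Task: {'id': task-1698452, ...} progress is 21%."
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")
```
[end editor's note]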
[ 1887.427847] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Cleaning up location [datastore1] OSTACK_IMG_b68313da-28bd-458e-8236-7af842a5b015 {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1887.428016] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_b68313da-28bd-458e-8236-7af842a5b015 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1887.428280] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1524d44-826c-4b63-8132-c2cec166c79e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.435366] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1887.435366] env[63297]: value = "task-1698453" [ 1887.435366] env[63297]: _type = "Task" [ 1887.435366] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.444016] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698453, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.945957] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035962} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.946506] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1887.946709] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5/9334e852-eb03-4385-8cae-1fb5b4d5a3e5.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1887.946952] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5/9334e852-eb03-4385-8cae-1fb5b4d5a3e5.vmdk to [datastore1] 1316bf99-cc93-4d1a-b31c-000dac095b3e/1316bf99-cc93-4d1a-b31c-000dac095b3e.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1887.947225] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b83d53e2-31e4-41c8-b9da-f577f454261b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.954803] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1887.954803] env[63297]: value = "task-1698454" [ 1887.954803] env[63297]: _type = "Task" [ 1887.954803] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.962540] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698454, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.468354] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698454, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.966521] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698454, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.469345] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698454, 'name': CopyVirtualDisk_Task} progress is 60%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.926013] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Acquiring lock "4b2c0d59-5cdb-449d-8452-d1b0dea6d334" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.926703] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Lock "4b2c0d59-5cdb-449d-8452-d1b0dea6d334" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.969086] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698454, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.430670] env[63297]: DEBUG nova.compute.manager [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1890.472103] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698454, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.958026] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.958172] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.959706] env[63297]: INFO nova.compute.claims [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1890.972838] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698454, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.561464} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.973109] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9334e852-eb03-4385-8cae-1fb5b4d5a3e5/9334e852-eb03-4385-8cae-1fb5b4d5a3e5.vmdk to [datastore1] 1316bf99-cc93-4d1a-b31c-000dac095b3e/1316bf99-cc93-4d1a-b31c-000dac095b3e.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1890.973909] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12833976-3d8c-4f94-871b-ceb0445557f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.995896] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 1316bf99-cc93-4d1a-b31c-000dac095b3e/1316bf99-cc93-4d1a-b31c-000dac095b3e.vmdk or device None with type streamOptimized {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1890.996358] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4971224-818f-4f16-a404-49c5b967f2f4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.016654] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1891.016654] env[63297]: value = 
"task-1698455" [ 1891.016654] env[63297]: _type = "Task" [ 1891.016654] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.024881] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698455, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.526377] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698455, 'name': ReconfigVM_Task, 'duration_secs': 0.329641} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.526712] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 1316bf99-cc93-4d1a-b31c-000dac095b3e/1316bf99-cc93-4d1a-b31c-000dac095b3e.vmdk or device None with type streamOptimized {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1891.527320] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ae03f05-b4c7-4e50-99d9-0a3dec2a9a5e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.533746] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1891.533746] env[63297]: value = "task-1698456" [ 1891.533746] env[63297]: _type = "Task" [ 1891.533746] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.542021] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698456, 'name': Rename_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.827595] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.827820] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.028505] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d803af1d-6a5a-4721-9cdd-b33fec4541ad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.039357] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417ea625-7445-4139-92ed-35f56c3c6a55 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.047212] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698456, 'name': Rename_Task, 'duration_secs': 0.14369} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.070998] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1892.071500] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8713e0bf-8196-45b5-9a1f-8211a146a983 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.073595] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631accc0-5fb5-4c06-b8d8-f48f5b2e16a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.083109] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62380a7b-03b7-4d5e-a6e3-b114565eaef2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.086708] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1892.086708] env[63297]: value = "task-1698457" [ 1892.086708] env[63297]: _type = "Task" [ 1892.086708] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.097737] env[63297]: DEBUG nova.compute.provider_tree [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1892.106300] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698457, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.330288] env[63297]: DEBUG nova.compute.manager [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1892.603864] env[63297]: DEBUG oslo_vmware.api [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698457, 'name': PowerOnVM_Task, 'duration_secs': 0.495051} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.604387] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1892.620420] env[63297]: ERROR nova.scheduler.client.report [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [req-dd329553-9b0e-4e55-80fc-841a8e7b13d4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dd329553-9b0e-4e55-80fc-841a8e7b13d4"}]} [ 1892.637917] env[63297]: DEBUG nova.scheduler.client.report [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1892.653056] env[63297]: DEBUG nova.scheduler.client.report [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1892.653307] env[63297]: DEBUG nova.compute.provider_tree [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1892.665371] env[63297]: DEBUG nova.scheduler.client.report [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1892.685474] env[63297]: DEBUG nova.scheduler.client.report [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1892.708309] env[63297]: DEBUG nova.compute.manager [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1892.709225] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4277807-6de9-4a39-bff1-4142670d9c88 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.754683] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e041d296-f350-4bf1-9699-1282c876557a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.761732] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7adf09-1657-4b5d-bc1b-d62e0e230011 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.792788] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b30b1f-81aa-45e2-ad13-b89399062412 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.800304] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323e3d1f-0126-47a2-bb26-fe5795d700f7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.814220] env[63297]: DEBUG nova.compute.provider_tree [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1892.848653] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.229928] env[63297]: DEBUG oslo_concurrency.lockutils [None req-8ab0e97d-8c35-4ac6-b6fc-71c23b7f4c23 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.983s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.345049] env[63297]: DEBUG nova.scheduler.client.report [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 177 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1893.345409] env[63297]: DEBUG nova.compute.provider_tree [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 177 to 178 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1893.345641] env[63297]: DEBUG nova.compute.provider_tree [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1893.660940] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "1316bf99-cc93-4d1a-b31c-000dac095b3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.661426] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.661661] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "1316bf99-cc93-4d1a-b31c-000dac095b3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.661852] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.662039] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.664133] env[63297]: INFO nova.compute.manager [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Terminating instance [ 1893.665758] env[63297]: DEBUG nova.compute.manager [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1893.665947] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1893.666792] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fff25e5-89e2-46aa-8d65-c99f42c52ce7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.674642] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1893.674862] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12b46e8c-e366-4a9c-a972-275d05dd6c72 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.681713] env[63297]: DEBUG oslo_vmware.api [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1893.681713] env[63297]: value = "task-1698458" [ 1893.681713] env[63297]: _type = "Task" [ 1893.681713] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.689031] env[63297]: DEBUG oslo_vmware.api [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698458, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.850587] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.892s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.851209] env[63297]: DEBUG nova.compute.manager [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1893.853744] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.005s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.855269] env[63297]: INFO nova.compute.claims [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1894.192070] env[63297]: DEBUG oslo_vmware.api [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698458, 'name': PowerOffVM_Task, 'duration_secs': 0.176381} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.192258] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1894.192460] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1894.192762] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79763d1d-fca4-471d-a64f-3eb1134cf1b8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.351902] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1894.352156] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1894.352346] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleting the datastore file [datastore1] 1316bf99-cc93-4d1a-b31c-000dac095b3e {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1894.352612] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2081f6e-1ff3-4e32-a4df-fb5ace03ab38 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.362699] env[63297]: DEBUG nova.compute.utils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1894.364568] env[63297]: DEBUG oslo_vmware.api [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for the task: (returnval){ [ 1894.364568] env[63297]: value = "task-1698460" [ 1894.364568] env[63297]: _type = "Task" [ 1894.364568] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.365226] env[63297]: DEBUG nova.compute.manager [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1894.365478] env[63297]: DEBUG nova.network.neutron [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1894.380537] env[63297]: DEBUG oslo_vmware.api [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698460, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.417127] env[63297]: DEBUG nova.policy [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e9e4efe015bc416ca0b18cbb7d460085', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a611a4dfaa0e4263903f31456f7c045b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1894.670088] env[63297]: DEBUG nova.network.neutron [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Successfully created port: 86b9790e-5619-4103-beb9-6cbf03712484 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1894.865707] env[63297]: DEBUG nova.compute.manager [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1894.882676] env[63297]: DEBUG oslo_vmware.api [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Task: {'id': task-1698460, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123451} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.883993] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1894.883993] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1894.883993] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1894.883993] env[63297]: INFO nova.compute.manager [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1894.883993] env[63297]: DEBUG oslo.service.loopingcall [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1894.884207] env[63297]: DEBUG nova.compute.manager [-] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1894.884207] env[63297]: DEBUG nova.network.neutron [-] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1894.948428] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89adcb9c-a60b-4e0b-9158-1602a33ca8f0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.956377] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a59cb9-d4d5-4da6-bdbc-dab6c0091bd8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.987834] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ca88b6-a2f4-40cb-a3ee-80853f858ad0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.995329] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d9d45a-6866-46eb-9c9e-bf58c214379a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.008539] env[63297]: DEBUG nova.compute.provider_tree [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1895.335554] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.335881] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.336443] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock 
"d7db24c1-35db-46d5-a406-fbb8c1c5d158-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.336443] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.336443] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.339713] env[63297]: INFO nova.compute.manager [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Terminating instance [ 1895.342021] env[63297]: DEBUG nova.compute.manager [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1895.342021] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1895.342740] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0730e117-4ba8-4546-9235-21b32c3fa806 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.350789] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1895.351015] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b786b964-350c-47ab-ab59-5476ecd1932b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.355223] env[63297]: DEBUG nova.compute.manager [req-c0c24917-1f13-49d2-bd3f-7f4d3f2eab0e req-538fcbfa-3e99-4a23-bd18-fae0613255bf service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Received event network-vif-deleted-22445506-b44b-4648-8c7b-164ab284edb9 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1895.355223] env[63297]: INFO nova.compute.manager [req-c0c24917-1f13-49d2-bd3f-7f4d3f2eab0e req-538fcbfa-3e99-4a23-bd18-fae0613255bf service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Neutron deleted interface 22445506-b44b-4648-8c7b-164ab284edb9; detaching it from the instance and deleting it from the info cache [ 1895.355223] env[63297]: DEBUG nova.network.neutron [req-c0c24917-1f13-49d2-bd3f-7f4d3f2eab0e req-538fcbfa-3e99-4a23-bd18-fae0613255bf service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1895.360303] env[63297]: DEBUG oslo_vmware.api [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1895.360303] env[63297]: value = "task-1698461" [ 1895.360303] env[63297]: _type = "Task" [ 1895.360303] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.368479] env[63297]: DEBUG oslo_vmware.api [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698461, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.542388] env[63297]: DEBUG nova.scheduler.client.report [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 178 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1895.542388] env[63297]: DEBUG nova.compute.provider_tree [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 178 to 179 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1895.542388] env[63297]: DEBUG nova.compute.provider_tree [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1895.665335] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.828997] env[63297]: DEBUG nova.network.neutron [-] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1895.858211] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4bcb9aba-6934-4092-9f5d-4dd16429e06a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.868994] env[63297]: DEBUG oslo_vmware.api [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698461, 'name': PowerOffVM_Task, 'duration_secs': 0.196032} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.870146] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1895.870368] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1895.870695] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ac2e5f5-5149-424b-892d-8aba8c251a04 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.876500] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7de8b5-5bd2-469d-8837-6fb509a01393 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.609034] env[63297]: DEBUG nova.network.neutron [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Successfully updated port: 86b9790e-5619-4103-beb9-6cbf03712484 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1896.612638] env[63297]: DEBUG nova.compute.manager [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1896.615149] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.761s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.615599] env[63297]: DEBUG nova.compute.manager [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1896.618988] env[63297]: INFO nova.compute.manager [-] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Took 1.73 seconds to deallocate network for instance. 
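The PowerOffVM_Task and DeleteDatastoreFile_Task entries above ("progress is 0%" followed by "completed successfully") come from oslo.vmware's task polling, which wraps a periodic status check in an oslo.service looping call. A minimal sketch of that pattern follows; fetch_task_info() is a hypothetical stand-in for the vSphere TaskInfo lookup, not the actual oslo_vmware.api internals.

```python
# Sketch of the poll-until-done pattern behind the task log entries above.
# The real implementation lives in oslo_vmware/api.py (wait_for_task/_poll_task);
# fetch_task_info() here is a hypothetical stand-in for the TaskInfo query.
from oslo_service import loopingcall


def wait_for_task(fetch_task_info, poll_interval=0.5):
    def _poll():
        info = fetch_task_info()  # e.g. {'state': 'running', 'progress': 0}
        if info['state'] == 'success':
            # Raising LoopingCallDone stops the loop; wait() returns its value.
            raise loopingcall.LoopingCallDone(info)
        if info['state'] == 'error':
            raise RuntimeError('task failed: %s' % info)
        # Otherwise keep polling; each pass would log the current progress.

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=poll_interval).wait()
```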
[ 1896.620036] env[63297]: DEBUG nova.compute.manager [req-8775bc1a-1ba2-4d2a-882d-2abdd80a01d4 req-4a043ebc-1068-4446-a54c-584f94b386d6 service nova] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Received event network-vif-plugged-86b9790e-5619-4103-beb9-6cbf03712484 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1896.620243] env[63297]: DEBUG oslo_concurrency.lockutils [req-8775bc1a-1ba2-4d2a-882d-2abdd80a01d4 req-4a043ebc-1068-4446-a54c-584f94b386d6 service nova] Acquiring lock "4b2c0d59-5cdb-449d-8452-d1b0dea6d334-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.620484] env[63297]: DEBUG oslo_concurrency.lockutils [req-8775bc1a-1ba2-4d2a-882d-2abdd80a01d4 req-4a043ebc-1068-4446-a54c-584f94b386d6 service nova] Lock "4b2c0d59-5cdb-449d-8452-d1b0dea6d334-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.620669] env[63297]: DEBUG oslo_concurrency.lockutils [req-8775bc1a-1ba2-4d2a-882d-2abdd80a01d4 req-4a043ebc-1068-4446-a54c-584f94b386d6 service nova] Lock "4b2c0d59-5cdb-449d-8452-d1b0dea6d334-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.620854] env[63297]: DEBUG nova.compute.manager [req-8775bc1a-1ba2-4d2a-882d-2abdd80a01d4 req-4a043ebc-1068-4446-a54c-584f94b386d6 service nova] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] No waiting events found dispatching network-vif-plugged-86b9790e-5619-4103-beb9-6cbf03712484 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1896.621080] env[63297]: WARNING nova.compute.manager [req-8775bc1a-1ba2-4d2a-882d-2abdd80a01d4 req-4a043ebc-1068-4446-a54c-584f94b386d6 service nova] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Received unexpected event network-vif-plugged-86b9790e-5619-4103-beb9-6cbf03712484 for instance with vm_state building and task_state spawning. [ 1896.641571] env[63297]: DEBUG nova.compute.manager [req-c0c24917-1f13-49d2-bd3f-7f4d3f2eab0e req-538fcbfa-3e99-4a23-bd18-fae0613255bf service nova] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Detach interface failed, port_id=22445506-b44b-4648-8c7b-164ab284edb9, reason: Instance 1316bf99-cc93-4d1a-b31c-000dac095b3e could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1896.650311] env[63297]: DEBUG nova.virt.hardware [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1896.650537] env[63297]: DEBUG nova.virt.hardware [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1896.650706] env[63297]: DEBUG nova.virt.hardware [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1896.650878] env[63297]: DEBUG nova.virt.hardware [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1896.651069] env[63297]: DEBUG nova.virt.hardware [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1896.651235] env[63297]: DEBUG nova.virt.hardware [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1896.651456] env[63297]: DEBUG nova.virt.hardware [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1896.651590] env[63297]: DEBUG nova.virt.hardware [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1896.651750] 
env[63297]: DEBUG nova.virt.hardware [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1896.651907] env[63297]: DEBUG nova.virt.hardware [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1896.652099] env[63297]: DEBUG nova.virt.hardware [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1896.653020] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce4a1d6-f566-4672-9de9-d2f568b93b0c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.661805] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d325f3f6-8065-457f-929e-a24693e08727 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.749997] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1896.751286] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1896.751570] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleting the datastore file [datastore1] d7db24c1-35db-46d5-a406-fbb8c1c5d158 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1896.752131] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf5645a0-40c4-4e56-9f35-2c7de2c220bb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.758189] env[63297]: DEBUG oslo_vmware.api [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for the task: (returnval){ [ 1896.758189] env[63297]: value = "task-1698463" [ 1896.758189] env[63297]: _type = "Task" [ 1896.758189] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.766477] env[63297]: DEBUG oslo_vmware.api [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698463, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.112024] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Acquiring lock "refresh_cache-4b2c0d59-5cdb-449d-8452-d1b0dea6d334" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1897.112386] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Acquired lock "refresh_cache-4b2c0d59-5cdb-449d-8452-d1b0dea6d334" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.112386] env[63297]: DEBUG nova.network.neutron [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1897.120047] env[63297]: DEBUG nova.compute.utils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1897.121763] env[63297]: DEBUG nova.compute.manager [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1897.121951] env[63297]: DEBUG nova.network.neutron [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1897.127566] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.127886] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.128034] env[63297]: DEBUG nova.objects.instance [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lazy-loading 'resources' on Instance uuid 1316bf99-cc93-4d1a-b31c-000dac095b3e {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1897.162663] env[63297]: DEBUG nova.policy [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'acb339bdff424582a2aad0578cc255eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '612e80df32dc4fb39e2fcf28c0c0a80b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1897.267730] env[63297]: DEBUG oslo_vmware.api [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Task: {'id': task-1698463, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124347} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.268081] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1897.268284] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1897.268553] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1897.268833] env[63297]: INFO nova.compute.manager [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Took 1.93 seconds to destroy the instance on the hypervisor. [ 1897.269108] env[63297]: DEBUG oslo.service.loopingcall [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1897.269315] env[63297]: DEBUG nova.compute.manager [-] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1897.269416] env[63297]: DEBUG nova.network.neutron [-] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1897.457928] env[63297]: DEBUG nova.network.neutron [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Successfully created port: 48409784-eed5-4e22-940f-e406d1b3af8a {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1897.625319] env[63297]: DEBUG nova.compute.manager [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1897.670712] env[63297]: DEBUG nova.network.neutron [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1897.706961] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c5c22d-da8a-4b50-85af-5feb60b6e596 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.714743] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e9b7b1-d61d-4a5e-a162-2c35a19dde2d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.751067] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990c395a-f43e-4928-97ad-f6ac4bff6551 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.763370] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36176f2-ce43-4daa-8163-aa3464724594 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.777929] env[63297]: DEBUG nova.compute.provider_tree [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1897.892627] env[63297]: DEBUG nova.network.neutron [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Updating instance_info_cache with network_info: [{"id": "86b9790e-5619-4103-beb9-6cbf03712484", "address": "fa:16:3e:9a:fc:c7", "network": {"id": "a20f2d2c-b125-4243-bb19-47925c76a53f", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1192224391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a611a4dfaa0e4263903f31456f7c045b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86b9790e-56", "ovs_interfaceid": "86b9790e-5619-4103-beb9-6cbf03712484", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.927125] env[63297]: DEBUG nova.compute.manager [req-b2fab867-93fb-420d-8bba-5957eda2be3d req-c50f54b4-be45-48cf-81d2-0d3fc5d38cf2 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Received event network-vif-deleted-8b49c0c7-27b3-41da-b832-28195da8e8d1 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1897.927341] env[63297]: INFO nova.compute.manager 
[req-b2fab867-93fb-420d-8bba-5957eda2be3d req-c50f54b4-be45-48cf-81d2-0d3fc5d38cf2 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Neutron deleted interface 8b49c0c7-27b3-41da-b832-28195da8e8d1; detaching it from the instance and deleting it from the info cache [ 1897.927578] env[63297]: DEBUG nova.network.neutron [req-b2fab867-93fb-420d-8bba-5957eda2be3d req-c50f54b4-be45-48cf-81d2-0d3fc5d38cf2 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.098227] env[63297]: DEBUG nova.compute.manager [req-745c9b9b-a311-4957-8ee6-cc1d72667ee3 req-eccd7d4d-9382-4049-a8ea-801aaa410375 service nova] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Received event network-changed-86b9790e-5619-4103-beb9-6cbf03712484 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1898.098446] env[63297]: DEBUG nova.compute.manager [req-745c9b9b-a311-4957-8ee6-cc1d72667ee3 req-eccd7d4d-9382-4049-a8ea-801aaa410375 service nova] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Refreshing instance network info cache due to event network-changed-86b9790e-5619-4103-beb9-6cbf03712484. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1898.098671] env[63297]: DEBUG oslo_concurrency.lockutils [req-745c9b9b-a311-4957-8ee6-cc1d72667ee3 req-eccd7d4d-9382-4049-a8ea-801aaa410375 service nova] Acquiring lock "refresh_cache-4b2c0d59-5cdb-449d-8452-d1b0dea6d334" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.282986] env[63297]: DEBUG nova.scheduler.client.report [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1898.396220] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Releasing lock "refresh_cache-4b2c0d59-5cdb-449d-8452-d1b0dea6d334" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.396553] env[63297]: DEBUG nova.compute.manager [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Instance network_info: |[{"id": "86b9790e-5619-4103-beb9-6cbf03712484", "address": "fa:16:3e:9a:fc:c7", "network": {"id": "a20f2d2c-b125-4243-bb19-47925c76a53f", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1192224391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a611a4dfaa0e4263903f31456f7c045b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86b9790e-56", "ovs_interfaceid": "86b9790e-5619-4103-beb9-6cbf03712484", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1898.396810] env[63297]: DEBUG nova.network.neutron [-] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.397937] env[63297]: DEBUG oslo_concurrency.lockutils [req-745c9b9b-a311-4957-8ee6-cc1d72667ee3 req-eccd7d4d-9382-4049-a8ea-801aaa410375 service nova] Acquired lock "refresh_cache-4b2c0d59-5cdb-449d-8452-d1b0dea6d334" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.398149] env[63297]: DEBUG nova.network.neutron [req-745c9b9b-a311-4957-8ee6-cc1d72667ee3 req-eccd7d4d-9382-4049-a8ea-801aaa410375 service nova] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Refreshing network info cache for port 86b9790e-5619-4103-beb9-6cbf03712484 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1898.399847] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:fc:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d62c1cf-f39a-4626-9552-f1e13c692636', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '86b9790e-5619-4103-beb9-6cbf03712484', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1898.406768] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Creating folder: Project (a611a4dfaa0e4263903f31456f7c045b). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1898.409921] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3082ee49-b9be-42f9-8cb7-c452d702dcc7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.422888] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Created folder: Project (a611a4dfaa0e4263903f31456f7c045b) in parent group-v353718. 
[ 1898.422888] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Creating folder: Instances. Parent ref: group-v354041. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1898.423200] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65712e5c-def2-4cf7-b854-b3557ee0926f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.431821] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Created folder: Instances in parent group-v354041. [ 1898.432170] env[63297]: DEBUG oslo.service.loopingcall [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1898.432430] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dda740b0-4187-488e-afd9-495c4f5de009 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.434082] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1898.434542] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-380437d9-8364-41ae-8428-75e0a2bda62d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.458088] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd8cd3e-2c70-47c5-9afe-d8f17c81a634 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.468164] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1898.468164] env[63297]: value = "task-1698466" [ 1898.468164] env[63297]: _type = "Task" [ 1898.468164] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.476930] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698466, 'name': CreateVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.481046] env[63297]: DEBUG nova.compute.manager [req-b2fab867-93fb-420d-8bba-5957eda2be3d req-c50f54b4-be45-48cf-81d2-0d3fc5d38cf2 service nova] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Detach interface failed, port_id=8b49c0c7-27b3-41da-b832-28195da8e8d1, reason: Instance d7db24c1-35db-46d5-a406-fbb8c1c5d158 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1898.618846] env[63297]: DEBUG nova.network.neutron [req-745c9b9b-a311-4957-8ee6-cc1d72667ee3 req-eccd7d4d-9382-4049-a8ea-801aaa410375 service nova] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Updated VIF entry in instance network info cache for port 86b9790e-5619-4103-beb9-6cbf03712484. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1898.619263] env[63297]: DEBUG nova.network.neutron [req-745c9b9b-a311-4957-8ee6-cc1d72667ee3 req-eccd7d4d-9382-4049-a8ea-801aaa410375 service nova] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Updating instance_info_cache with network_info: [{"id": "86b9790e-5619-4103-beb9-6cbf03712484", "address": "fa:16:3e:9a:fc:c7", "network": {"id": "a20f2d2c-b125-4243-bb19-47925c76a53f", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1192224391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a611a4dfaa0e4263903f31456f7c045b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86b9790e-56", "ovs_interfaceid": "86b9790e-5619-4103-beb9-6cbf03712484", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.633822] env[63297]: DEBUG nova.compute.manager [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1898.659496] env[63297]: DEBUG nova.virt.hardware [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1898.659731] env[63297]: DEBUG nova.virt.hardware [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1898.659892] env[63297]: DEBUG nova.virt.hardware [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1898.660088] env[63297]: DEBUG nova.virt.hardware [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1898.660240] env[63297]: DEBUG nova.virt.hardware [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1898.660455] env[63297]: DEBUG nova.virt.hardware [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1898.660653] env[63297]: DEBUG nova.virt.hardware [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1898.660832] env[63297]: DEBUG nova.virt.hardware [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1898.661007] env[63297]: DEBUG nova.virt.hardware [None 
req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1898.661182] env[63297]: DEBUG nova.virt.hardware [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1898.661357] env[63297]: DEBUG nova.virt.hardware [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1898.662460] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2d520d-b520-458f-8f47-7568dd8288a2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.670680] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506703f2-29cd-425c-857c-59844b000ebf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.787566] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.660s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.808658] env[63297]: INFO nova.scheduler.client.report [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Deleted allocations for instance 1316bf99-cc93-4d1a-b31c-000dac095b3e [ 1898.908027] env[63297]: INFO nova.compute.manager [-] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Took 1.64 seconds to deallocate network for instance. [ 1898.978101] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698466, 'name': CreateVM_Task, 'duration_secs': 0.330939} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.978362] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1898.978979] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.979288] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.979488] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1898.979799] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b64b855f-4d6a-4f76-981b-7f03e105417c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.984797] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Waiting for the task: (returnval){ [ 1898.984797] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528ba5bc-a015-d6b0-3486-9f8b4aa08e44" [ 1898.984797] env[63297]: _type = "Task" [ 1898.984797] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.991930] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528ba5bc-a015-d6b0-3486-9f8b4aa08e44, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.121820] env[63297]: DEBUG oslo_concurrency.lockutils [req-745c9b9b-a311-4957-8ee6-cc1d72667ee3 req-eccd7d4d-9382-4049-a8ea-801aaa410375 service nova] Releasing lock "refresh_cache-4b2c0d59-5cdb-449d-8452-d1b0dea6d334" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1899.158921] env[63297]: DEBUG nova.network.neutron [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Successfully updated port: 48409784-eed5-4e22-940f-e406d1b3af8a {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1899.316474] env[63297]: DEBUG oslo_concurrency.lockutils [None req-13ef4e94-559d-47c2-8fc7-c4c4ac57ca85 tempest-ServerActionsTestOtherB-717361991 tempest-ServerActionsTestOtherB-717361991-project-member] Lock "1316bf99-cc93-4d1a-b31c-000dac095b3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.655s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.413856] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.414179] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.414473] env[63297]: DEBUG nova.objects.instance [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lazy-loading 'resources' on Instance uuid d7db24c1-35db-46d5-a406-fbb8c1c5d158 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1899.495115] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528ba5bc-a015-d6b0-3486-9f8b4aa08e44, 'name': SearchDatastore_Task, 'duration_secs': 0.012765} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.495438] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1899.495677] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1899.495924] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1899.496087] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1899.496288] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1899.496558] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e702074-e339-46d3-bbb2-2414c889818d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.504646] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1899.504836] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1899.505576] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce34e0c6-c26d-43cf-836d-8ca1c400c85d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.510851] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Waiting for the task: (returnval){ [ 1899.510851] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c679e2-4d20-269d-6ff1-c9080ebe9bb2" [ 1899.510851] env[63297]: _type = "Task" [ 1899.510851] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.519808] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c679e2-4d20-269d-6ff1-c9080ebe9bb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.661330] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1899.661470] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1899.661578] env[63297]: DEBUG nova.network.neutron [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1899.664470] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1899.664681] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1899.664817] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1899.967245] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa3211f-1e00-4b9a-a4c9-61cd0e222c73 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.974881] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae5dac7-ef9a-4d37-9fe4-c693f97d8252 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.004276] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9b861e-a69a-49ac-a0bc-ad2b87f18d71 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.012057] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f158aa44-86b8-45ec-8813-3df496b8e248 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.023558] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52c679e2-4d20-269d-6ff1-c9080ebe9bb2, 'name': SearchDatastore_Task, 'duration_secs': 0.008597} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.031902] env[63297]: DEBUG nova.compute.provider_tree [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1900.033177] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be491da6-cc57-4523-85aa-f5c4aa702d15 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.038955] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Waiting for the task: (returnval){ [ 1900.038955] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b2a0c8-cd29-2303-6d49-f64422dbfed8" [ 1900.038955] env[63297]: _type = "Task" [ 1900.038955] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.047594] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b2a0c8-cd29-2303-6d49-f64422dbfed8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.122702] env[63297]: DEBUG nova.compute.manager [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Received event network-vif-plugged-48409784-eed5-4e22-940f-e406d1b3af8a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1900.122702] env[63297]: DEBUG oslo_concurrency.lockutils [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] Acquiring lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.122702] env[63297]: DEBUG oslo_concurrency.lockutils [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.122702] env[63297]: DEBUG oslo_concurrency.lockutils [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.122702] env[63297]: DEBUG nova.compute.manager [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] No waiting events found dispatching network-vif-plugged-48409784-eed5-4e22-940f-e406d1b3af8a {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1900.122893] env[63297]: WARNING nova.compute.manager [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Received unexpected event network-vif-plugged-48409784-eed5-4e22-940f-e406d1b3af8a for instance with vm_state building and task_state spawning. [ 1900.123054] env[63297]: DEBUG nova.compute.manager [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Received event network-changed-48409784-eed5-4e22-940f-e406d1b3af8a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1900.123121] env[63297]: DEBUG nova.compute.manager [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Refreshing instance network info cache due to event network-changed-48409784-eed5-4e22-940f-e406d1b3af8a. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1900.123272] env[63297]: DEBUG oslo_concurrency.lockutils [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] Acquiring lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1900.213346] env[63297]: DEBUG nova.network.neutron [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Instance cache missing network info. {{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1900.367634] env[63297]: DEBUG nova.network.neutron [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance_info_cache with network_info: [{"id": "48409784-eed5-4e22-940f-e406d1b3af8a", "address": "fa:16:3e:b0:17:fc", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48409784-ee", "ovs_interfaceid": "48409784-eed5-4e22-940f-e406d1b3af8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1900.537229] env[63297]: DEBUG nova.scheduler.client.report [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1900.549805] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b2a0c8-cd29-2303-6d49-f64422dbfed8, 'name': SearchDatastore_Task, 'duration_secs': 0.009199} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.550071] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1900.550322] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 4b2c0d59-5cdb-449d-8452-d1b0dea6d334/4b2c0d59-5cdb-449d-8452-d1b0dea6d334.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1900.550582] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a77ea3f-723e-4326-8f64-18f6dc719ead {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.558474] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Waiting for the task: (returnval){ [ 1900.558474] env[63297]: value = "task-1698468" [ 1900.558474] env[63297]: _type = "Task" [ 1900.558474] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.566233] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698468, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.665305] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1900.870547] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1900.871190] env[63297]: DEBUG nova.compute.manager [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Instance network_info: |[{"id": "48409784-eed5-4e22-940f-e406d1b3af8a", "address": "fa:16:3e:b0:17:fc", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48409784-ee", "ovs_interfaceid": "48409784-eed5-4e22-940f-e406d1b3af8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1900.871553] env[63297]: DEBUG oslo_concurrency.lockutils [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] Acquired lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1900.871869] env[63297]: DEBUG nova.network.neutron [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Refreshing network info cache for port 48409784-eed5-4e22-940f-e406d1b3af8a {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1900.873334] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:17:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '48409784-eed5-4e22-940f-e406d1b3af8a', 'vif_model': 
'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1900.883457] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Creating folder: Project (612e80df32dc4fb39e2fcf28c0c0a80b). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1900.884885] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-050b7d7c-e6b8-4614-adc4-50aa0dcb8db2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.897344] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Created folder: Project (612e80df32dc4fb39e2fcf28c0c0a80b) in parent group-v353718. [ 1900.897560] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Creating folder: Instances. Parent ref: group-v354044. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1900.897843] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3d4b9ab-efc9-4afe-be80-e25e00584cab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.907869] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Created folder: Instances in parent group-v354044. [ 1900.908204] env[63297]: DEBUG oslo.service.loopingcall [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1900.908446] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1900.908723] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e2444e5-81ce-4add-87c4-70bfe767d191 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.930203] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1900.930203] env[63297]: value = "task-1698471" [ 1900.930203] env[63297]: _type = "Task" [ 1900.930203] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.939056] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698471, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.045431] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.631s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.064259] env[63297]: INFO nova.scheduler.client.report [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Deleted allocations for instance d7db24c1-35db-46d5-a406-fbb8c1c5d158 [ 1901.070758] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698468, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448246} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.071194] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 4b2c0d59-5cdb-449d-8452-d1b0dea6d334/4b2c0d59-5cdb-449d-8452-d1b0dea6d334.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1901.071419] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1901.071658] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-18132def-71ca-4c7a-8638-e28343958fde {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.078360] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Waiting for the task: (returnval){ [ 1901.078360] env[63297]: value = "task-1698472" [ 1901.078360] env[63297]: _type = "Task" [ 1901.078360] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.086094] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698472, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.440974] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698471, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.574907] env[63297]: DEBUG oslo_concurrency.lockutils [None req-3d9bafe7-f92d-4b67-a1b0-8ff39b747dfb tempest-AttachVolumeShelveTestJSON-1694485513 tempest-AttachVolumeShelveTestJSON-1694485513-project-member] Lock "d7db24c1-35db-46d5-a406-fbb8c1c5d158" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.239s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.588916] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698472, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086411} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.590349] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1901.591264] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b47258-2338-4de9-8edd-dea5010bb211 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.615295] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 4b2c0d59-5cdb-449d-8452-d1b0dea6d334/4b2c0d59-5cdb-449d-8452-d1b0dea6d334.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1901.617841] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9488001-ad85-4491-a4aa-0310f79b74c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.637059] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Waiting for the task: (returnval){ [ 1901.637059] env[63297]: value = "task-1698473" [ 1901.637059] env[63297]: _type = "Task" [ 1901.637059] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.644742] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698473, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.665560] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1901.756157] env[63297]: DEBUG nova.network.neutron [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updated VIF entry in instance network info cache for port 48409784-eed5-4e22-940f-e406d1b3af8a. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1901.756565] env[63297]: DEBUG nova.network.neutron [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance_info_cache with network_info: [{"id": "48409784-eed5-4e22-940f-e406d1b3af8a", "address": "fa:16:3e:b0:17:fc", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48409784-ee", "ovs_interfaceid": "48409784-eed5-4e22-940f-e406d1b3af8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.941690] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698471, 'name': CreateVM_Task, 'duration_secs': 0.661064} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.941916] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1901.942654] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.942826] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.943420] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1901.943420] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed65fcdc-189c-4e95-9c14-61d4d6bca4d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.948203] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 1901.948203] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]528caef6-e845-e8f7-0544-440fb0ba6d89" [ 1901.948203] env[63297]: _type = "Task" [ 1901.948203] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.956048] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528caef6-e845-e8f7-0544-440fb0ba6d89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.146198] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698473, 'name': ReconfigVM_Task, 'duration_secs': 0.261458} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.146498] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 4b2c0d59-5cdb-449d-8452-d1b0dea6d334/4b2c0d59-5cdb-449d-8452-d1b0dea6d334.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1902.147206] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c9d88fa3-7798-45fa-ba00-f58ae0815fc4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.153551] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Waiting for the task: (returnval){ [ 1902.153551] env[63297]: value = "task-1698474" [ 1902.153551] env[63297]: _type = "Task" [ 1902.153551] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.161151] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698474, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.168080] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.168293] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.168453] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.168602] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1902.169339] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6ffc93-bb95-4ceb-b500-ea7291a3e553 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.176858] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853755e8-d56a-4a62-8969-826afeda292c {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.192375] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836b48bf-448f-4d04-a2d7-f2d6b791e026 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.199560] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c3f0c8-5988-45d7-bc87-87f1c725829e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.229824] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180652MB free_disk=181GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1902.230032] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.230177] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.260347] env[63297]: DEBUG oslo_concurrency.lockutils [req-46cfcdb8-65cd-4c55-93c9-515d49928b50 req-05fd577c-4420-45d8-b085-3bfe0d4ad9d8 service nova] Releasing lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.457895] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]528caef6-e845-e8f7-0544-440fb0ba6d89, 'name': SearchDatastore_Task, 'duration_secs': 0.009374} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.458242] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.458422] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1902.458690] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.458867] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.459069] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1902.459320] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-788a29e6-1d53-43c6-9618-4dea6d014760 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.467020] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1902.467199] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1902.467882] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7894bcb1-fd9c-47c8-b15e-c6c6d245c0cb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.472572] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 1902.472572] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d4d30-603a-7562-0280-183bca31d782" [ 1902.472572] env[63297]: _type = "Task" [ 1902.472572] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.479622] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d4d30-603a-7562-0280-183bca31d782, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.663882] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698474, 'name': Rename_Task, 'duration_secs': 0.168349} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.664180] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1902.664421] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-396dfe8d-6eb9-4d30-9ffc-98fbd774f0dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.670642] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Waiting for the task: (returnval){ [ 1902.670642] env[63297]: value = "task-1698475" [ 1902.670642] env[63297]: _type = "Task" [ 1902.670642] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.677972] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698475, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.984038] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d4d30-603a-7562-0280-183bca31d782, 'name': SearchDatastore_Task, 'duration_secs': 0.007571} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.984797] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59d35994-9c20-4b2d-9937-21ec86110651 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.990522] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 1902.990522] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]524e94b7-6380-e165-6044-b8a59cfdd3e0" [ 1902.990522] env[63297]: _type = "Task" [ 1902.990522] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.998054] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524e94b7-6380-e165-6044-b8a59cfdd3e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.181135] env[63297]: DEBUG oslo_vmware.api [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698475, 'name': PowerOnVM_Task, 'duration_secs': 0.460886} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.181434] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1903.182664] env[63297]: INFO nova.compute.manager [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Took 6.57 seconds to spawn the instance on the hypervisor. 
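[editor's note] The record pairs above ("Invoking VirtualMachine.PowerOnVM_Task ...", then repeated "Task: {...} progress is N%" lines until "completed successfully") are the standard oslo.vmware pattern: invoke an asynchronous vCenter method, then poll the returned Task managed object. A minimal sketch of that pattern, assuming an already-created VMwareAPISession and a VM managed-object reference supplied by the caller (both placeholders here, not taken from this log):

    from oslo_vmware import api as vmware_api

    def power_on(session: vmware_api.VMwareAPISession, vm_ref):
        # Start the asynchronous vCenter task; this SOAP call is what the
        # "Invoking ... with opID=oslo.vmware-..." lines record.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Block until the task reaches a terminal state; oslo.vmware polls it
        # periodically, emitting the "_poll_task ... progress is N%" lines.
        return session.wait_for_task(task)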
[ 1903.182664] env[63297]: DEBUG nova.compute.manager [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1903.182664] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384533c6-4dcf-467b-83d4-fe819ff176bb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.258764] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 4b2c0d59-5cdb-449d-8452-d1b0dea6d334 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.258952] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 3fca16fa-0768-4ea8-87f2-b5a37898cdfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.259157] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1903.259332] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1903.300220] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9e0fed-e340-4884-bd95-d61a2f430a26 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.308479] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f916d28-6131-4d7b-ba7f-515d3f640607 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.339835] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf40423a-cc29-4f66-9e6e-fcc60c8bf479 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.347369] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64ef900-cb0a-42ec-8962-03264ea92e31 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.363047] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1903.501070] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]524e94b7-6380-e165-6044-b8a59cfdd3e0, 'name': SearchDatastore_Task, 'duration_secs': 0.008607} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.501367] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.501596] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 3fca16fa-0768-4ea8-87f2-b5a37898cdfa/3fca16fa-0768-4ea8-87f2-b5a37898cdfa.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1903.502211] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df547c81-54df-49a4-9367-6db87b809a99 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.509085] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 1903.509085] env[63297]: value = "task-1698477" [ 1903.509085] env[63297]: _type = "Task" [ 1903.509085] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.517275] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698477, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.702071] env[63297]: INFO nova.compute.manager [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Took 12.77 seconds to build instance. 
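[editor's note] The inventory payload logged for provider 88960333-a089-4255-ad72-5c02d57b2b35 translates into schedulable capacity as (total - reserved) * allocation_ratio per resource class. A small illustrative calculation in plain Python (not Nova code), using the values from the record above:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        # Placement's effective-capacity formula for a resource class.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0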
[ 1903.904127] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 179 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1903.904384] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 179 to 180 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1903.904479] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1904.019542] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698477, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.204450] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5442fd69-f6bd-4deb-8ff7-df9921c6c558 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Lock "4b2c0d59-5cdb-449d-8452-d1b0dea6d334" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.278s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.410429] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1904.410429] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.180s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.480265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Acquiring lock "4b2c0d59-5cdb-449d-8452-d1b0dea6d334" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.481376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Lock "4b2c0d59-5cdb-449d-8452-d1b0dea6d334" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.481376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Acquiring lock "4b2c0d59-5cdb-449d-8452-d1b0dea6d334-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.481376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Lock "4b2c0d59-5cdb-449d-8452-d1b0dea6d334-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.481376] env[63297]: DEBUG oslo_concurrency.lockutils [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Lock "4b2c0d59-5cdb-449d-8452-d1b0dea6d334-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.483352] env[63297]: INFO nova.compute.manager [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Terminating instance [ 1904.485117] env[63297]: DEBUG nova.compute.manager [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1904.485305] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1904.486132] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbb0410-2eef-437a-ac82-45eb998c6250 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.493805] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1904.493996] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0bf4c80-fa1c-402a-835d-fcd9f1ee5452 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.501043] env[63297]: DEBUG oslo_vmware.api [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Waiting for the task: (returnval){ [ 1904.501043] env[63297]: value = "task-1698478" [ 1904.501043] env[63297]: _type = "Task" [ 1904.501043] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.510765] env[63297]: DEBUG oslo_vmware.api [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698478, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.518301] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698477, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.014624] env[63297]: DEBUG oslo_vmware.api [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698478, 'name': PowerOffVM_Task, 'duration_secs': 0.23767} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.019897] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1905.020339] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1905.021090] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f211844-f1de-407a-ba1a-32648e3f9cf8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.030220] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698477, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.477424} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.030695] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 3fca16fa-0768-4ea8-87f2-b5a37898cdfa/3fca16fa-0768-4ea8-87f2-b5a37898cdfa.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1905.031576] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1905.031576] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ec1b8188-e103-4f91-a629-df4178d819d2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.038848] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 1905.038848] env[63297]: value = "task-1698480" [ 1905.038848] env[63297]: _type = "Task" [ 1905.038848] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.051080] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698480, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.407224] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1905.407513] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1905.407669] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1905.548653] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698480, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069854} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.549063] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1905.550219] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4d427c-6a09-4fa8-90c4-db7dd769f99e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.582233] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 3fca16fa-0768-4ea8-87f2-b5a37898cdfa/3fca16fa-0768-4ea8-87f2-b5a37898cdfa.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1905.582795] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29b0e91c-8376-4032-912a-91d16af76e1f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.607482] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 1905.607482] env[63297]: value = "task-1698481" [ 1905.607482] env[63297]: _type = "Task" [ 1905.607482] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.616408] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698481, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.911625] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1905.911921] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1905.912112] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1906.121214] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698481, 'name': ReconfigVM_Task, 'duration_secs': 0.260419} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.121214] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 3fca16fa-0768-4ea8-87f2-b5a37898cdfa/3fca16fa-0768-4ea8-87f2-b5a37898cdfa.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1906.121764] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec23348e-a998-4c8f-8cad-ebb9b75a8519 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.128857] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 1906.128857] env[63297]: value = "task-1698482" [ 1906.128857] env[63297]: _type = "Task" [ 1906.128857] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.138171] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698482, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.644524] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698482, 'name': Rename_Task, 'duration_secs': 0.146564} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.644987] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1906.645327] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8e677c4-9263-4d24-8551-216f6e2b440c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.655182] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 1906.655182] env[63297]: value = "task-1698483" [ 1906.655182] env[63297]: _type = "Task" [ 1906.655182] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.664729] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698483, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.000496] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Acquiring lock "c3dbab93-1ac5-4c0c-862d-88638a460198" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.000665] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Lock "c3dbab93-1ac5-4c0c-862d-88638a460198" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.165417] env[63297]: DEBUG oslo_vmware.api [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698483, 'name': PowerOnVM_Task, 'duration_secs': 0.428418} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.165728] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1907.166021] env[63297]: INFO nova.compute.manager [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Took 8.53 seconds to spawn the instance on the hypervisor. 
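[editor's note] The "Acquiring lock ... by ...", "Lock ... acquired ... waited N.NNNs" and "Lock ... \"released\" ... held N.NNNs" records throughout this section come from oslo.concurrency's lockutils, which Nova wraps for its named locks. A minimal sketch of the underlying decorator pattern (the function body is a placeholder, not Nova's resource-tracker code):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def _update_available_resource():
        # Runs with the named lock held; lockutils logs the acquire, how long
        # the caller waited, and how long the lock was held when released.
        pass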
[ 1907.166263] env[63297]: DEBUG nova.compute.manager [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1907.167115] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2458a83e-5f6b-4726-bba2-d9150c299994 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.504900] env[63297]: DEBUG nova.compute.manager [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1907.686697] env[63297]: INFO nova.compute.manager [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Took 14.85 seconds to build instance. [ 1908.032441] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.032780] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.034401] env[63297]: INFO nova.compute.claims [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1908.188517] env[63297]: DEBUG oslo_concurrency.lockutils [None req-e031f3d6-ec3e-451f-bb8a-5f824fa481f9 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.360s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.436417] env[63297]: DEBUG nova.compute.manager [req-8aa1dab1-f5a6-4d34-ac9c-7612ac738d0d req-c7fb96e5-c0e2-4cdd-aecb-45e25ce61c19 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Received event network-changed-48409784-eed5-4e22-940f-e406d1b3af8a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1908.436417] env[63297]: DEBUG nova.compute.manager [req-8aa1dab1-f5a6-4d34-ac9c-7612ac738d0d req-c7fb96e5-c0e2-4cdd-aecb-45e25ce61c19 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Refreshing instance network info cache due to event network-changed-48409784-eed5-4e22-940f-e406d1b3af8a. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1908.436716] env[63297]: DEBUG oslo_concurrency.lockutils [req-8aa1dab1-f5a6-4d34-ac9c-7612ac738d0d req-c7fb96e5-c0e2-4cdd-aecb-45e25ce61c19 service nova] Acquiring lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.436756] env[63297]: DEBUG oslo_concurrency.lockutils [req-8aa1dab1-f5a6-4d34-ac9c-7612ac738d0d req-c7fb96e5-c0e2-4cdd-aecb-45e25ce61c19 service nova] Acquired lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.436910] env[63297]: DEBUG nova.network.neutron [req-8aa1dab1-f5a6-4d34-ac9c-7612ac738d0d req-c7fb96e5-c0e2-4cdd-aecb-45e25ce61c19 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Refreshing network info cache for port 48409784-eed5-4e22-940f-e406d1b3af8a {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1909.097347] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c409ed14-6231-4dcb-95fd-12f073943efa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.107606] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb84a6b-fd01-42fa-8b9d-a84f13aa071c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.141468] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99221af7-a1de-4003-a78b-7a5f776cb767 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.149108] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c22da4-e400-4979-b6eb-58a93159f8db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.162423] env[63297]: DEBUG nova.compute.provider_tree [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1909.180971] env[63297]: DEBUG nova.network.neutron [req-8aa1dab1-f5a6-4d34-ac9c-7612ac738d0d req-c7fb96e5-c0e2-4cdd-aecb-45e25ce61c19 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updated VIF entry in instance network info cache for port 48409784-eed5-4e22-940f-e406d1b3af8a. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1909.181325] env[63297]: DEBUG nova.network.neutron [req-8aa1dab1-f5a6-4d34-ac9c-7612ac738d0d req-c7fb96e5-c0e2-4cdd-aecb-45e25ce61c19 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance_info_cache with network_info: [{"id": "48409784-eed5-4e22-940f-e406d1b3af8a", "address": "fa:16:3e:b0:17:fc", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48409784-ee", "ovs_interfaceid": "48409784-eed5-4e22-940f-e406d1b3af8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1909.665969] env[63297]: DEBUG nova.scheduler.client.report [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1909.683526] env[63297]: DEBUG oslo_concurrency.lockutils [req-8aa1dab1-f5a6-4d34-ac9c-7612ac738d0d req-c7fb96e5-c0e2-4cdd-aecb-45e25ce61c19 service nova] Releasing lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1910.170952] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.138s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.171563] env[63297]: DEBUG nova.compute.manager [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Start building networks asynchronously for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1910.676316] env[63297]: DEBUG nova.compute.utils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1910.678069] env[63297]: DEBUG nova.compute.manager [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1910.678237] env[63297]: DEBUG nova.network.neutron [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1910.713882] env[63297]: DEBUG nova.policy [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c42c6933d6e44a7897d660fd33dac71a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '556a6b00cba24582a37483106c071576', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 1911.003095] env[63297]: DEBUG nova.network.neutron [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Successfully created port: 89b91429-b871-44dc-8b83-40eaa3373e5e {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1911.181283] env[63297]: DEBUG nova.compute.manager [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1911.323323] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1911.323476] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1911.323674] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Deleting the datastore file [datastore1] 4b2c0d59-5cdb-449d-8452-d1b0dea6d334 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1911.323955] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d0e23cb-b553-4c4f-b791-93a55771d6ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.331326] env[63297]: DEBUG oslo_vmware.api [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Waiting for the task: (returnval){ [ 1911.331326] env[63297]: value = "task-1698484" [ 1911.331326] env[63297]: _type = "Task" [ 1911.331326] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.339570] env[63297]: DEBUG oslo_vmware.api [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698484, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.841974] env[63297]: DEBUG oslo_vmware.api [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Task: {'id': task-1698484, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125699} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.842244] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1911.842430] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1911.842604] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1911.842771] env[63297]: INFO nova.compute.manager [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Took 7.36 seconds to destroy the instance on the hypervisor. [ 1911.843028] env[63297]: DEBUG oslo.service.loopingcall [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1911.843241] env[63297]: DEBUG nova.compute.manager [-] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1911.843334] env[63297]: DEBUG nova.network.neutron [-] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1912.074758] env[63297]: DEBUG nova.compute.manager [req-6cb8c93a-e80a-4afe-9d79-7b32b20a17b8 req-1df90a10-a82e-4933-80e7-345411b21ebb service nova] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Received event network-vif-deleted-86b9790e-5619-4103-beb9-6cbf03712484 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1912.074987] env[63297]: INFO nova.compute.manager [req-6cb8c93a-e80a-4afe-9d79-7b32b20a17b8 req-1df90a10-a82e-4933-80e7-345411b21ebb service nova] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Neutron deleted interface 86b9790e-5619-4103-beb9-6cbf03712484; detaching it from the instance and deleting it from the info cache [ 1912.075176] env[63297]: DEBUG nova.network.neutron [req-6cb8c93a-e80a-4afe-9d79-7b32b20a17b8 req-1df90a10-a82e-4933-80e7-345411b21ebb service nova] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.191580] env[63297]: DEBUG nova.compute.manager [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1912.218768] env[63297]: DEBUG nova.virt.hardware [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1912.219104] env[63297]: DEBUG nova.virt.hardware [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1912.219295] env[63297]: DEBUG nova.virt.hardware [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1912.219484] env[63297]: DEBUG nova.virt.hardware [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1912.219629] env[63297]: DEBUG nova.virt.hardware [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1912.219775] env[63297]: DEBUG nova.virt.hardware [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1912.219977] env[63297]: DEBUG nova.virt.hardware [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1912.220188] env[63297]: DEBUG nova.virt.hardware [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1912.220399] env[63297]: DEBUG nova.virt.hardware [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 
tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1912.220570] env[63297]: DEBUG nova.virt.hardware [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1912.220743] env[63297]: DEBUG nova.virt.hardware [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1912.221853] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babd3ec2-ce29-448e-b3f9-836f2ba22f4b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.229324] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e423c4-5ba1-43ca-a08c-9a34bbd5b416 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.388529] env[63297]: DEBUG nova.compute.manager [req-1fef16b1-35bb-4a0a-adc7-c1792fb9ead1 req-a6bb74fe-3a96-4deb-bfcc-0332acf67ba7 service nova] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Received event network-vif-plugged-89b91429-b871-44dc-8b83-40eaa3373e5e {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1912.388815] env[63297]: DEBUG oslo_concurrency.lockutils [req-1fef16b1-35bb-4a0a-adc7-c1792fb9ead1 req-a6bb74fe-3a96-4deb-bfcc-0332acf67ba7 service nova] Acquiring lock "c3dbab93-1ac5-4c0c-862d-88638a460198-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.388968] env[63297]: DEBUG oslo_concurrency.lockutils [req-1fef16b1-35bb-4a0a-adc7-c1792fb9ead1 req-a6bb74fe-3a96-4deb-bfcc-0332acf67ba7 service nova] Lock "c3dbab93-1ac5-4c0c-862d-88638a460198-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.389156] env[63297]: DEBUG oslo_concurrency.lockutils [req-1fef16b1-35bb-4a0a-adc7-c1792fb9ead1 req-a6bb74fe-3a96-4deb-bfcc-0332acf67ba7 service nova] Lock "c3dbab93-1ac5-4c0c-862d-88638a460198-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.389385] env[63297]: DEBUG nova.compute.manager [req-1fef16b1-35bb-4a0a-adc7-c1792fb9ead1 req-a6bb74fe-3a96-4deb-bfcc-0332acf67ba7 service nova] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] No waiting events found dispatching network-vif-plugged-89b91429-b871-44dc-8b83-40eaa3373e5e {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1912.389602] env[63297]: WARNING nova.compute.manager [req-1fef16b1-35bb-4a0a-adc7-c1792fb9ead1 req-a6bb74fe-3a96-4deb-bfcc-0332acf67ba7 service nova] 
[instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Received unexpected event network-vif-plugged-89b91429-b871-44dc-8b83-40eaa3373e5e for instance with vm_state building and task_state spawning. [ 1912.473652] env[63297]: DEBUG nova.network.neutron [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Successfully updated port: 89b91429-b871-44dc-8b83-40eaa3373e5e {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1912.554630] env[63297]: DEBUG nova.network.neutron [-] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.578061] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-98bcf843-0744-4108-bd1d-324543fa54ee {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.588369] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476a5350-9a83-425f-80e8-fbf7a94720cc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.612478] env[63297]: DEBUG nova.compute.manager [req-6cb8c93a-e80a-4afe-9d79-7b32b20a17b8 req-1df90a10-a82e-4933-80e7-345411b21ebb service nova] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Detach interface failed, port_id=86b9790e-5619-4103-beb9-6cbf03712484, reason: Instance 4b2c0d59-5cdb-449d-8452-d1b0dea6d334 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1912.975591] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Acquiring lock "refresh_cache-c3dbab93-1ac5-4c0c-862d-88638a460198" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.975795] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Acquired lock "refresh_cache-c3dbab93-1ac5-4c0c-862d-88638a460198" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.975935] env[63297]: DEBUG nova.network.neutron [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1913.057198] env[63297]: INFO nova.compute.manager [-] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Took 1.21 seconds to deallocate network for instance. [ 1913.520510] env[63297]: DEBUG nova.network.neutron [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1913.563887] env[63297]: DEBUG oslo_concurrency.lockutils [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.564312] env[63297]: DEBUG oslo_concurrency.lockutils [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.564654] env[63297]: DEBUG nova.objects.instance [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Lazy-loading 'resources' on Instance uuid 4b2c0d59-5cdb-449d-8452-d1b0dea6d334 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1913.663374] env[63297]: DEBUG nova.network.neutron [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Updating instance_info_cache with network_info: [{"id": "89b91429-b871-44dc-8b83-40eaa3373e5e", "address": "fa:16:3e:80:be:63", "network": {"id": "a15332e8-2493-4707-a6b2-38aaf19dcfb9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1165707005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "556a6b00cba24582a37483106c071576", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b91429-b8", "ovs_interfaceid": "89b91429-b871-44dc-8b83-40eaa3373e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.126309] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120648a8-b7ce-42f0-85df-c74fb03a77db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.134911] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cf87a5-e14c-4d4b-986c-68550e4c32aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.165959] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2e68a640-1ce1-41cf-bf7f-7c5fde545dc8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.168843] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Releasing lock "refresh_cache-c3dbab93-1ac5-4c0c-862d-88638a460198" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.169171] env[63297]: DEBUG nova.compute.manager [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Instance network_info: |[{"id": "89b91429-b871-44dc-8b83-40eaa3373e5e", "address": "fa:16:3e:80:be:63", "network": {"id": "a15332e8-2493-4707-a6b2-38aaf19dcfb9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1165707005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "556a6b00cba24582a37483106c071576", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b91429-b8", "ovs_interfaceid": "89b91429-b871-44dc-8b83-40eaa3373e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1914.169625] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:be:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89b91429-b871-44dc-8b83-40eaa3373e5e', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1914.178016] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Creating folder: Project (556a6b00cba24582a37483106c071576). Parent ref: group-v353718. 
{{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1914.178695] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f349c75-42ad-4129-9b3f-dc17dba43350 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.183827] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2239fb46-5f63-4a94-8659-3a0d1b4a988e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.188889] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Created folder: Project (556a6b00cba24582a37483106c071576) in parent group-v353718. [ 1914.189094] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Creating folder: Instances. Parent ref: group-v354047. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1914.189306] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27544a66-c956-49b0-9312-1fb43a67d084 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.198396] env[63297]: DEBUG nova.compute.provider_tree [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1914.208425] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Created folder: Instances in parent group-v354047. [ 1914.208658] env[63297]: DEBUG oslo.service.loopingcall [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1914.208842] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1914.209045] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3096fbef-17e6-455e-9f6f-6b6604be118c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.228406] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1914.228406] env[63297]: value = "task-1698487" [ 1914.228406] env[63297]: _type = "Task" [ 1914.228406] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.239073] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698487, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.415014] env[63297]: DEBUG nova.compute.manager [req-c139b8ef-a830-4535-ad62-15b6d7ca7729 req-17fa04a9-9eee-4467-86a9-43f6d2bf97ae service nova] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Received event network-changed-89b91429-b871-44dc-8b83-40eaa3373e5e {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1914.415257] env[63297]: DEBUG nova.compute.manager [req-c139b8ef-a830-4535-ad62-15b6d7ca7729 req-17fa04a9-9eee-4467-86a9-43f6d2bf97ae service nova] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Refreshing instance network info cache due to event network-changed-89b91429-b871-44dc-8b83-40eaa3373e5e. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1914.415484] env[63297]: DEBUG oslo_concurrency.lockutils [req-c139b8ef-a830-4535-ad62-15b6d7ca7729 req-17fa04a9-9eee-4467-86a9-43f6d2bf97ae service nova] Acquiring lock "refresh_cache-c3dbab93-1ac5-4c0c-862d-88638a460198" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.415631] env[63297]: DEBUG oslo_concurrency.lockutils [req-c139b8ef-a830-4535-ad62-15b6d7ca7729 req-17fa04a9-9eee-4467-86a9-43f6d2bf97ae service nova] Acquired lock "refresh_cache-c3dbab93-1ac5-4c0c-862d-88638a460198" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.415807] env[63297]: DEBUG nova.network.neutron [req-c139b8ef-a830-4535-ad62-15b6d7ca7729 req-17fa04a9-9eee-4467-86a9-43f6d2bf97ae service nova] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Refreshing network info cache for port 89b91429-b871-44dc-8b83-40eaa3373e5e {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1914.701987] env[63297]: DEBUG nova.scheduler.client.report [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1914.738814] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698487, 'name': CreateVM_Task, 'duration_secs': 0.323335} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.739044] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1914.739704] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.739819] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.740156] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1914.740408] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3586c9db-07cd-4dc1-82e5-c7435c52a1b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.745153] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Waiting for the task: (returnval){ [ 1914.745153] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5264d52e-8b8b-8316-7b6b-837c4d6515b2" [ 1914.745153] env[63297]: _type = "Task" [ 1914.745153] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.752653] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5264d52e-8b8b-8316-7b6b-837c4d6515b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.099501] env[63297]: DEBUG nova.network.neutron [req-c139b8ef-a830-4535-ad62-15b6d7ca7729 req-17fa04a9-9eee-4467-86a9-43f6d2bf97ae service nova] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Updated VIF entry in instance network info cache for port 89b91429-b871-44dc-8b83-40eaa3373e5e. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1915.099869] env[63297]: DEBUG nova.network.neutron [req-c139b8ef-a830-4535-ad62-15b6d7ca7729 req-17fa04a9-9eee-4467-86a9-43f6d2bf97ae service nova] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Updating instance_info_cache with network_info: [{"id": "89b91429-b871-44dc-8b83-40eaa3373e5e", "address": "fa:16:3e:80:be:63", "network": {"id": "a15332e8-2493-4707-a6b2-38aaf19dcfb9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1165707005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "556a6b00cba24582a37483106c071576", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b91429-b8", "ovs_interfaceid": "89b91429-b871-44dc-8b83-40eaa3373e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1915.207304] env[63297]: DEBUG oslo_concurrency.lockutils [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.643s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.229935] env[63297]: INFO nova.scheduler.client.report [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Deleted allocations for instance 4b2c0d59-5cdb-449d-8452-d1b0dea6d334 [ 1915.255878] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5264d52e-8b8b-8316-7b6b-837c4d6515b2, 'name': SearchDatastore_Task, 'duration_secs': 0.009458} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.256192] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.256423] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1915.256652] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.256798] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.256976] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1915.257318] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff734e00-e9ef-4c88-9cbd-36b18316f02a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.265886] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1915.266098] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1915.266844] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-149dd344-c9bb-4f49-9534-22f38391c1df {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.272048] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Waiting for the task: (returnval){ [ 1915.272048] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5269cd7d-4ff5-fb45-e93c-74cd9dbe6796" [ 1915.272048] env[63297]: _type = "Task" [ 1915.272048] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.281322] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5269cd7d-4ff5-fb45-e93c-74cd9dbe6796, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.603041] env[63297]: DEBUG oslo_concurrency.lockutils [req-c139b8ef-a830-4535-ad62-15b6d7ca7729 req-17fa04a9-9eee-4467-86a9-43f6d2bf97ae service nova] Releasing lock "refresh_cache-c3dbab93-1ac5-4c0c-862d-88638a460198" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.736743] env[63297]: DEBUG oslo_concurrency.lockutils [None req-208d6c6f-04b6-42e5-b7a1-5bccbd8ffca0 tempest-ServerAddressesTestJSON-2102271340 tempest-ServerAddressesTestJSON-2102271340-project-member] Lock "4b2c0d59-5cdb-449d-8452-d1b0dea6d334" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.256s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.782435] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5269cd7d-4ff5-fb45-e93c-74cd9dbe6796, 'name': SearchDatastore_Task, 'duration_secs': 0.009143} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.783196] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-001a5949-76ff-423e-b188-b114a09e0f3a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.788174] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Waiting for the task: (returnval){ [ 1915.788174] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52906f6b-7344-e4ff-835e-189a42b2d7dc" [ 1915.788174] env[63297]: _type = "Task" [ 1915.788174] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.795565] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52906f6b-7344-e4ff-835e-189a42b2d7dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.299309] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52906f6b-7344-e4ff-835e-189a42b2d7dc, 'name': SearchDatastore_Task, 'duration_secs': 0.008647} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.299309] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.301890] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c3dbab93-1ac5-4c0c-862d-88638a460198/c3dbab93-1ac5-4c0c-862d-88638a460198.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1916.301890] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d974143-7019-4ebf-8cf1-3edb11c6acdf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.307047] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Waiting for the task: (returnval){ [ 1916.307047] env[63297]: value = "task-1698488" [ 1916.307047] env[63297]: _type = "Task" [ 1916.307047] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.314856] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698488, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.817161] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698488, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.415029} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.817478] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] c3dbab93-1ac5-4c0c-862d-88638a460198/c3dbab93-1ac5-4c0c-862d-88638a460198.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1916.817638] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1916.817876] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-909fdd5f-eb72-4f9c-a74c-5cf04a4770fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.825463] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Waiting for the task: (returnval){ [ 1916.825463] env[63297]: value = "task-1698489" [ 1916.825463] env[63297]: _type = "Task" [ 1916.825463] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.833811] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698489, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.334939] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698489, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066878} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.335318] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1917.336097] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd422bae-b160-4183-b25d-65bfafdf1635 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.361032] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] c3dbab93-1ac5-4c0c-862d-88638a460198/c3dbab93-1ac5-4c0c-862d-88638a460198.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1917.361343] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73e1ba42-d4d3-4eee-b53c-fc874522d24c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.380611] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Waiting for the task: (returnval){ [ 1917.380611] env[63297]: value = "task-1698490" [ 1917.380611] env[63297]: _type = "Task" [ 1917.380611] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.388660] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698490, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.892709] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698490, 'name': ReconfigVM_Task, 'duration_secs': 0.282369} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.893516] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Reconfigured VM instance instance-00000078 to attach disk [datastore1] c3dbab93-1ac5-4c0c-862d-88638a460198/c3dbab93-1ac5-4c0c-862d-88638a460198.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1917.894436] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64f41f3d-359d-4de8-8f40-7cf18661984b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.901398] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Waiting for the task: (returnval){ [ 1917.901398] env[63297]: value = "task-1698491" [ 1917.901398] env[63297]: _type = "Task" [ 1917.901398] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.909724] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698491, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.411430] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698491, 'name': Rename_Task, 'duration_secs': 0.136525} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.411614] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1918.411834] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95f0a2ee-0e74-43e1-ad95-d2b32649cbcd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.418059] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Waiting for the task: (returnval){ [ 1918.418059] env[63297]: value = "task-1698492" [ 1918.418059] env[63297]: _type = "Task" [ 1918.418059] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.425113] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698492, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.927767] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698492, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.428233] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698492, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.928723] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698492, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.429914] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698492, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.930386] env[63297]: DEBUG oslo_vmware.api [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698492, 'name': PowerOnVM_Task, 'duration_secs': 2.16354} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.930663] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1920.930850] env[63297]: INFO nova.compute.manager [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Took 8.74 seconds to spawn the instance on the hypervisor. [ 1920.931038] env[63297]: DEBUG nova.compute.manager [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1920.931840] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4195d8a7-ba62-4a6e-8495-67ccbac28970 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.447305] env[63297]: INFO nova.compute.manager [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Took 13.43 seconds to build instance. 
[ 1921.949787] env[63297]: DEBUG oslo_concurrency.lockutils [None req-05e4ee3d-2f6b-4dc6-9c9e-02e385300c35 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Lock "c3dbab93-1ac5-4c0c-862d-88638a460198" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.949s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.367250] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Acquiring lock "c3dbab93-1ac5-4c0c-862d-88638a460198" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.367650] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Lock "c3dbab93-1ac5-4c0c-862d-88638a460198" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.367745] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Acquiring lock "c3dbab93-1ac5-4c0c-862d-88638a460198-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.368363] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Lock "c3dbab93-1ac5-4c0c-862d-88638a460198-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.368572] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Lock "c3dbab93-1ac5-4c0c-862d-88638a460198-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.370662] env[63297]: INFO nova.compute.manager [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Terminating instance [ 1923.372420] env[63297]: DEBUG nova.compute.manager [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1923.372635] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1923.373490] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf8bbd7-8911-455e-aa0e-21379580c6d1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.381547] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1923.381745] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82f18e3b-a1b9-42bc-9961-19f13ac0db0f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.387865] env[63297]: DEBUG oslo_vmware.api [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Waiting for the task: (returnval){ [ 1923.387865] env[63297]: value = "task-1698493" [ 1923.387865] env[63297]: _type = "Task" [ 1923.387865] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.395896] env[63297]: DEBUG oslo_vmware.api [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698493, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.898077] env[63297]: DEBUG oslo_vmware.api [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698493, 'name': PowerOffVM_Task, 'duration_secs': 0.184084} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.898290] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1923.898456] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1923.898704] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7943a7a4-f9db-4bfd-8951-5abeaa207128 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.078029] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1924.078279] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1924.078466] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Deleting the datastore file [datastore1] c3dbab93-1ac5-4c0c-862d-88638a460198 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1924.078768] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f480bd7-1199-4a3a-8b68-f5998aaf1572 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.085502] env[63297]: DEBUG oslo_vmware.api [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Waiting for the task: (returnval){ [ 1924.085502] env[63297]: value = "task-1698495" [ 1924.085502] env[63297]: _type = "Task" [ 1924.085502] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.093729] env[63297]: DEBUG oslo_vmware.api [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698495, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.595390] env[63297]: DEBUG oslo_vmware.api [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Task: {'id': task-1698495, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144317} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.595760] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1924.595820] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1924.595999] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1924.596188] env[63297]: INFO nova.compute.manager [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1924.596422] env[63297]: DEBUG oslo.service.loopingcall [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1924.596611] env[63297]: DEBUG nova.compute.manager [-] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1924.596707] env[63297]: DEBUG nova.network.neutron [-] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1924.833090] env[63297]: DEBUG nova.compute.manager [req-316502da-43c1-4da8-84b6-b3690919277f req-3dcbe3c8-c889-473f-ad94-00c61aae98db service nova] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Received event network-vif-deleted-89b91429-b871-44dc-8b83-40eaa3373e5e {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1924.833302] env[63297]: INFO nova.compute.manager [req-316502da-43c1-4da8-84b6-b3690919277f req-3dcbe3c8-c889-473f-ad94-00c61aae98db service nova] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Neutron deleted interface 89b91429-b871-44dc-8b83-40eaa3373e5e; detaching it from the instance and deleting it from the info cache [ 1924.833444] env[63297]: DEBUG nova.network.neutron [req-316502da-43c1-4da8-84b6-b3690919277f req-3dcbe3c8-c889-473f-ad94-00c61aae98db service nova] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.312781] env[63297]: DEBUG nova.network.neutron [-] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.337178] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2f21877-7b9f-4b42-9904-d4918c0455c9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.347532] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996b2d09-47e9-4b5d-b3f3-79ebf2f9dfcf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.370868] env[63297]: DEBUG nova.compute.manager [req-316502da-43c1-4da8-84b6-b3690919277f req-3dcbe3c8-c889-473f-ad94-00c61aae98db service nova] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Detach interface failed, port_id=89b91429-b871-44dc-8b83-40eaa3373e5e, reason: Instance c3dbab93-1ac5-4c0c-862d-88638a460198 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1925.815791] env[63297]: INFO nova.compute.manager [-] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Took 1.22 seconds to deallocate network for instance. 
[ 1926.322272] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.322499] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.322736] env[63297]: DEBUG nova.objects.instance [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Lazy-loading 'resources' on Instance uuid c3dbab93-1ac5-4c0c-862d-88638a460198 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1926.865123] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99b814a-f194-44da-b184-783ce2959abf {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.872843] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544ed73c-383f-4cea-b456-663019324ac2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.901666] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa21f456-cb6c-40c4-b64f-fa2f07a02837 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.908859] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52518a37-bce0-4d13-8ffc-0013886ebffe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.921274] env[63297]: DEBUG nova.compute.provider_tree [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1927.424081] env[63297]: DEBUG nova.scheduler.client.report [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1927.929838] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 
tempest-ServerTagsTestJSON-1300514436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.947673] env[63297]: INFO nova.scheduler.client.report [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Deleted allocations for instance c3dbab93-1ac5-4c0c-862d-88638a460198 [ 1928.455706] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02cb9d13-5a37-4530-9307-6d1a81f777c6 tempest-ServerTagsTestJSON-1300514436 tempest-ServerTagsTestJSON-1300514436-project-member] Lock "c3dbab93-1ac5-4c0c-862d-88638a460198" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.088s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.852077] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02190c12-1186-4da0-a4eb-93c6042bd5dc tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.852531] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02190c12-1186-4da0-a4eb-93c6042bd5dc tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.852592] env[63297]: DEBUG nova.compute.manager [None req-02190c12-1186-4da0-a4eb-93c6042bd5dc tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1946.853536] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc97b605-935e-472f-9961-11e4f692ab3f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.860812] env[63297]: DEBUG nova.compute.manager [None req-02190c12-1186-4da0-a4eb-93c6042bd5dc tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63297) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1946.861445] env[63297]: DEBUG nova.objects.instance [None req-02190c12-1186-4da0-a4eb-93c6042bd5dc tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lazy-loading 'flavor' on Instance uuid 3fca16fa-0768-4ea8-87f2-b5a37898cdfa {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1947.368588] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-02190c12-1186-4da0-a4eb-93c6042bd5dc tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] 
Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1947.368865] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ae31bf4-671c-4a9a-b97d-3c1baabd3e28 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.376295] env[63297]: DEBUG oslo_vmware.api [None req-02190c12-1186-4da0-a4eb-93c6042bd5dc tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 1947.376295] env[63297]: value = "task-1698496" [ 1947.376295] env[63297]: _type = "Task" [ 1947.376295] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.384430] env[63297]: DEBUG oslo_vmware.api [None req-02190c12-1186-4da0-a4eb-93c6042bd5dc tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698496, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.890076] env[63297]: DEBUG oslo_vmware.api [None req-02190c12-1186-4da0-a4eb-93c6042bd5dc tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698496, 'name': PowerOffVM_Task, 'duration_secs': 0.185794} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.890076] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-02190c12-1186-4da0-a4eb-93c6042bd5dc tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1947.890076] env[63297]: DEBUG nova.compute.manager [None req-02190c12-1186-4da0-a4eb-93c6042bd5dc tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1947.890076] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d371389b-b856-4aea-92fd-ef0f09ad83de {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.400224] env[63297]: DEBUG oslo_concurrency.lockutils [None req-02190c12-1186-4da0-a4eb-93c6042bd5dc tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.548s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.169987] env[63297]: DEBUG nova.objects.instance [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lazy-loading 'flavor' on Instance uuid 3fca16fa-0768-4ea8-87f2-b5a37898cdfa {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1949.674933] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] 
Acquiring lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.675149] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.675326] env[63297]: DEBUG nova.network.neutron [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1949.675505] env[63297]: DEBUG nova.objects.instance [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lazy-loading 'info_cache' on Instance uuid 3fca16fa-0768-4ea8-87f2-b5a37898cdfa {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1950.179541] env[63297]: DEBUG nova.objects.base [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Object Instance<3fca16fa-0768-4ea8-87f2-b5a37898cdfa> lazy-loaded attributes: flavor,info_cache {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1950.891385] env[63297]: DEBUG nova.network.neutron [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance_info_cache with network_info: [{"id": "48409784-eed5-4e22-940f-e406d1b3af8a", "address": "fa:16:3e:b0:17:fc", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48409784-ee", "ovs_interfaceid": "48409784-eed5-4e22-940f-e406d1b3af8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1951.393949] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1951.897784] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1951.898123] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fc4f2e2-f996-4671-981f-75f90100e81e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.906638] env[63297]: DEBUG oslo_vmware.api [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 1951.906638] env[63297]: value = "task-1698497" [ 1951.906638] env[63297]: _type = "Task" [ 1951.906638] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.914468] env[63297]: DEBUG oslo_vmware.api [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698497, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.418202] env[63297]: DEBUG oslo_vmware.api [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698497, 'name': PowerOnVM_Task, 'duration_secs': 0.36782} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.418202] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1952.419255] env[63297]: DEBUG nova.compute.manager [None req-f56f9492-87bd-46da-b5c3-364a5c633d71 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1952.419372] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ad7507-1b88-4d23-a526-e23ad6f9e24c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.664908] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.665305] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.665628] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.665709] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1961.665783] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1962.661339] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1962.661578] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1963.166702] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1963.669660] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.669974] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.670201] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.670392] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1963.671747] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d517a70a-bf9e-473f-8242-e71b0c0f863f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.679644] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c36351-2b4e-4fa5-975b-317c6b8bb854 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.693760] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1121bf4d-16d2-4461-aef9-556e3a326937 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.700013] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62db6a17-722f-4b63-9e7a-b8d9fc929ae1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.727836] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181319MB free_disk=181GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1963.727963] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.728161] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.752481] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 3fca16fa-0768-4ea8-87f2-b5a37898cdfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1964.752741] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1964.752829] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1964.779619] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5bc88bb-a6be-46d4-8e00-c4eb31650da5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.787168] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b427213-1c75-4235-aad5-f56364a3f9d5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.816951] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057ed7cd-71c3-47fe-88f1-c3942b78c467 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.824055] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ced6e5-8209-4637-86fb-e343de64a5fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.836798] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] 
Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1965.340286] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1965.845411] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1965.845795] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.117s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.344140] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1967.344613] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1967.344613] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1967.873776] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1967.873928] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquired lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1967.874089] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Forcefully refreshing network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1967.874247] env[63297]: DEBUG nova.objects.instance [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lazy-loading 'info_cache' on Instance uuid 3fca16fa-0768-4ea8-87f2-b5a37898cdfa {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1969.595751] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 
3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance_info_cache with network_info: [{"id": "48409784-eed5-4e22-940f-e406d1b3af8a", "address": "fa:16:3e:b0:17:fc", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48409784-ee", "ovs_interfaceid": "48409784-eed5-4e22-940f-e406d1b3af8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.098900] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Releasing lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.099144] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updated the network info_cache for instance {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1970.099370] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1970.099538] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1984.638056] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1984.638450] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1984.638556] env[63297]: INFO 
nova.compute.manager [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Rebooting instance [ 1985.157136] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.157338] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.157527] env[63297]: DEBUG nova.network.neutron [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1985.849270] env[63297]: DEBUG nova.network.neutron [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance_info_cache with network_info: [{"id": "48409784-eed5-4e22-940f-e406d1b3af8a", "address": "fa:16:3e:b0:17:fc", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48409784-ee", "ovs_interfaceid": "48409784-eed5-4e22-940f-e406d1b3af8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1986.352995] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1986.356641] env[63297]: DEBUG nova.compute.manager [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] 
[instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1986.357528] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cd5148-8f29-4cac-8358-80d609eb7568 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.373389] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82bb499-0296-4b53-a3aa-0445b05590fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.380524] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Doing hard reboot of VM {{(pid=63297) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1987.380733] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-7a544781-af2a-4458-84c9-b355456eec79 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.386763] env[63297]: DEBUG oslo_vmware.api [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 1987.386763] env[63297]: value = "task-1698498" [ 1987.386763] env[63297]: _type = "Task" [ 1987.386763] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.393819] env[63297]: DEBUG oslo_vmware.api [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698498, 'name': ResetVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.896543] env[63297]: DEBUG oslo_vmware.api [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698498, 'name': ResetVM_Task, 'duration_secs': 0.090868} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.896843] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Did hard reboot of VM {{(pid=63297) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1987.896979] env[63297]: DEBUG nova.compute.manager [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1987.897775] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f700289e-3816-4859-98b7-5ca949d2c8ef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.409819] env[63297]: DEBUG oslo_concurrency.lockutils [None req-42f6ab32-2dde-430f-b2a1-823101f6ad43 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.771s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.665790] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2019.666072] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.665536] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.665855] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.665963] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2022.661642] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2024.664566] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2024.664858] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2024.664965] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2025.170678] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.170987] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.171209] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.171397] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2025.172338] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48de1079-0b3b-4ed1-8bb4-ae817a017d40 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.180952] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596d783f-b4d6-4d02-b002-352ca20fd355 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.194845] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70845480-24a9-45f4-ac20-63363165e486 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.201133] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92de115-8123-415a-b120-0748b2e7586a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.230621] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181227MB free_disk=181GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2025.230830] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.231342] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.871070] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "6508e861-6f06-4ebd-be0f-22312d983306" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.871363] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "6508e861-6f06-4ebd-be0f-22312d983306" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.251783] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 3fca16fa-0768-4ea8-87f2-b5a37898cdfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2026.374053] env[63297]: DEBUG nova.compute.manager [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Starting instance... {{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2026.756532] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 6508e861-6f06-4ebd-be0f-22312d983306 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2026.756790] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2026.756918] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2026.791798] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068313a9-c784-4db1-ab1d-4a5a8556c0d3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.799195] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57345b43-010c-44c4-8755-c4ae26ddc541 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.827846] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb91d1c-f43a-4ef3-819e-a3326d8733ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.834610] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7a456c-ff75-43cc-a9aa-0ff45f254198 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.847301] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2026.892162] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2027.350256] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2027.351708] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2027.351912] 
env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.121s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2027.352203] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.460s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2027.353737] env[63297]: INFO nova.compute.claims [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2027.356138] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2027.356286] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Cleaning up deleted instances {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2027.867077] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] There are 19 instances to clean {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 2027.867330] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: c3dbab93-1ac5-4c0c-862d-88638a460198] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2028.372473] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 4b2c0d59-5cdb-449d-8452-d1b0dea6d334] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2028.407023] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978a8c4c-9f6c-4666-9346-042cc5d23568 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.414983] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85746ec-585f-4c85-b761-e55001fe43dc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.444319] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463afd7b-f3d9-4fb9-9572-8a0bb2bc132c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.451053] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b2d2f9-d83f-4a43-be42-e2f80e71d6db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.463758] env[63297]: DEBUG nova.compute.provider_tree [None 
req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2028.875645] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 41d4118d-7621-4ac9-be2f-2664cd691180] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2028.968095] env[63297]: DEBUG nova.scheduler.client.report [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2029.379267] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 1316bf99-cc93-4d1a-b31c-000dac095b3e] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2029.472450] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.120s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.472962] env[63297]: DEBUG nova.compute.manager [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2029.882662] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: b6a1f66d-783e-4263-b9c4-a4d517ce2923] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2029.978135] env[63297]: DEBUG nova.compute.utils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2029.979425] env[63297]: DEBUG nova.compute.manager [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2029.979594] env[63297]: DEBUG nova.network.neutron [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2030.036072] env[63297]: DEBUG nova.policy [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'acb339bdff424582a2aad0578cc255eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '612e80df32dc4fb39e2fcf28c0c0a80b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 2030.287316] env[63297]: DEBUG nova.network.neutron [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Successfully created port: b9858162-f29d-48e7-aa88-421c45ade58b {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2030.386611] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: d4b2da36-b0fd-47d2-95de-ef4b3f91330f] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2030.483199] env[63297]: DEBUG nova.compute.manager [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2030.889987] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: d7db24c1-35db-46d5-a406-fbb8c1c5d158] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2031.393717] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: c257ee27-8d87-4fe6-a953-cc4af1ec36d6] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2031.492415] env[63297]: DEBUG nova.compute.manager [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2031.518937] env[63297]: DEBUG nova.virt.hardware [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2031.519229] env[63297]: DEBUG nova.virt.hardware [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2031.519396] env[63297]: DEBUG nova.virt.hardware [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2031.519586] env[63297]: DEBUG nova.virt.hardware [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2031.519810] env[63297]: DEBUG nova.virt.hardware [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2031.519980] env[63297]: DEBUG nova.virt.hardware [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2031.520206] env[63297]: DEBUG nova.virt.hardware [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2031.520364] env[63297]: DEBUG nova.virt.hardware [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2031.520543] env[63297]: DEBUG nova.virt.hardware [None 
req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2031.520746] env[63297]: DEBUG nova.virt.hardware [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2031.520939] env[63297]: DEBUG nova.virt.hardware [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2031.521829] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457aeecb-8780-46d1-9275-ced5523b11d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.530052] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4075116b-875a-41de-be38-d6eb2c118992 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.655171] env[63297]: DEBUG nova.compute.manager [req-5fac38f4-1f91-4fc4-80d7-cccafe7258c1 req-e1caadb1-9125-45b1-9186-788ebd395d08 service nova] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Received event network-vif-plugged-b9858162-f29d-48e7-aa88-421c45ade58b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2031.655397] env[63297]: DEBUG oslo_concurrency.lockutils [req-5fac38f4-1f91-4fc4-80d7-cccafe7258c1 req-e1caadb1-9125-45b1-9186-788ebd395d08 service nova] Acquiring lock "6508e861-6f06-4ebd-be0f-22312d983306-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.655603] env[63297]: DEBUG oslo_concurrency.lockutils [req-5fac38f4-1f91-4fc4-80d7-cccafe7258c1 req-e1caadb1-9125-45b1-9186-788ebd395d08 service nova] Lock "6508e861-6f06-4ebd-be0f-22312d983306-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.655771] env[63297]: DEBUG oslo_concurrency.lockutils [req-5fac38f4-1f91-4fc4-80d7-cccafe7258c1 req-e1caadb1-9125-45b1-9186-788ebd395d08 service nova] Lock "6508e861-6f06-4ebd-be0f-22312d983306-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.655941] env[63297]: DEBUG nova.compute.manager [req-5fac38f4-1f91-4fc4-80d7-cccafe7258c1 req-e1caadb1-9125-45b1-9186-788ebd395d08 service nova] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] No waiting events found dispatching network-vif-plugged-b9858162-f29d-48e7-aa88-421c45ade58b {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2031.656185] env[63297]: WARNING nova.compute.manager [req-5fac38f4-1f91-4fc4-80d7-cccafe7258c1 
req-e1caadb1-9125-45b1-9186-788ebd395d08 service nova] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Received unexpected event network-vif-plugged-b9858162-f29d-48e7-aa88-421c45ade58b for instance with vm_state building and task_state spawning. [ 2031.786985] env[63297]: DEBUG nova.network.neutron [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Successfully updated port: b9858162-f29d-48e7-aa88-421c45ade58b {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2031.898046] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 927824d3-a98b-47b4-a850-1fb15fd0fbe4] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2032.289652] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-6508e861-6f06-4ebd-be0f-22312d983306" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2032.289833] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-6508e861-6f06-4ebd-be0f-22312d983306" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2032.289931] env[63297]: DEBUG nova.network.neutron [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2032.400278] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 37a4719c-20b4-4cb3-b8fc-bfa28b906799] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2032.821636] env[63297]: DEBUG nova.network.neutron [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2032.903554] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: c1696ee9-cb48-414c-b0a0-b6fa2e880a81] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2032.958340] env[63297]: DEBUG nova.network.neutron [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Updating instance_info_cache with network_info: [{"id": "b9858162-f29d-48e7-aa88-421c45ade58b", "address": "fa:16:3e:6e:6d:9b", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9858162-f2", "ovs_interfaceid": "b9858162-f29d-48e7-aa88-421c45ade58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2033.406708] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 5c0eefd2-69d4-4100-93b9-d6265c28c7be] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2033.460898] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-6508e861-6f06-4ebd-be0f-22312d983306" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2033.461245] env[63297]: DEBUG nova.compute.manager [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Instance network_info: |[{"id": "b9858162-f29d-48e7-aa88-421c45ade58b", "address": "fa:16:3e:6e:6d:9b", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9858162-f2", "ovs_interfaceid": "b9858162-f29d-48e7-aa88-421c45ade58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2033.461687] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:6d:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9858162-f29d-48e7-aa88-421c45ade58b', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2033.469592] env[63297]: DEBUG oslo.service.loopingcall [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2033.470176] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2033.470421] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-06369539-b630-4424-9bfa-fb1321f197b0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.490954] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2033.490954] env[63297]: value = "task-1698499" [ 2033.490954] env[63297]: _type = "Task" [ 2033.490954] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.498929] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698499, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.683074] env[63297]: DEBUG nova.compute.manager [req-fe3ae5e5-6b41-49a2-b632-b6f64c34eb6e req-419fe28e-a332-47cd-bd6e-2fa60d770c33 service nova] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Received event network-changed-b9858162-f29d-48e7-aa88-421c45ade58b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2033.683374] env[63297]: DEBUG nova.compute.manager [req-fe3ae5e5-6b41-49a2-b632-b6f64c34eb6e req-419fe28e-a332-47cd-bd6e-2fa60d770c33 service nova] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Refreshing instance network info cache due to event network-changed-b9858162-f29d-48e7-aa88-421c45ade58b. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2033.683607] env[63297]: DEBUG oslo_concurrency.lockutils [req-fe3ae5e5-6b41-49a2-b632-b6f64c34eb6e req-419fe28e-a332-47cd-bd6e-2fa60d770c33 service nova] Acquiring lock "refresh_cache-6508e861-6f06-4ebd-be0f-22312d983306" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2033.683726] env[63297]: DEBUG oslo_concurrency.lockutils [req-fe3ae5e5-6b41-49a2-b632-b6f64c34eb6e req-419fe28e-a332-47cd-bd6e-2fa60d770c33 service nova] Acquired lock "refresh_cache-6508e861-6f06-4ebd-be0f-22312d983306" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2033.683884] env[63297]: DEBUG nova.network.neutron [req-fe3ae5e5-6b41-49a2-b632-b6f64c34eb6e req-419fe28e-a332-47cd-bd6e-2fa60d770c33 service nova] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Refreshing network info cache for port b9858162-f29d-48e7-aa88-421c45ade58b {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2033.910407] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 6a99c537-e882-4c8c-b7c3-0861a5c0dc0d] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2034.001288] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698499, 'name': CreateVM_Task, 'duration_secs': 0.353488} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.001416] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2034.002064] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2034.002220] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2034.002538] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2034.002783] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-505f743e-613a-4916-9a40-eb0d86b1852b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.007849] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the 
task: (returnval){ [ 2034.007849] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e7997f-84a2-247e-937b-a06c643d0f7b" [ 2034.007849] env[63297]: _type = "Task" [ 2034.007849] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.015575] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e7997f-84a2-247e-937b-a06c643d0f7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.360015] env[63297]: DEBUG nova.network.neutron [req-fe3ae5e5-6b41-49a2-b632-b6f64c34eb6e req-419fe28e-a332-47cd-bd6e-2fa60d770c33 service nova] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Updated VIF entry in instance network info cache for port b9858162-f29d-48e7-aa88-421c45ade58b. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2034.360409] env[63297]: DEBUG nova.network.neutron [req-fe3ae5e5-6b41-49a2-b632-b6f64c34eb6e req-419fe28e-a332-47cd-bd6e-2fa60d770c33 service nova] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Updating instance_info_cache with network_info: [{"id": "b9858162-f29d-48e7-aa88-421c45ade58b", "address": "fa:16:3e:6e:6d:9b", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9858162-f2", "ovs_interfaceid": "b9858162-f29d-48e7-aa88-421c45ade58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2034.414788] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: ffaa1402-5b51-4393-82c7-d9db964edfd3] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2034.517600] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e7997f-84a2-247e-937b-a06c643d0f7b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.863066] env[63297]: DEBUG oslo_concurrency.lockutils [req-fe3ae5e5-6b41-49a2-b632-b6f64c34eb6e req-419fe28e-a332-47cd-bd6e-2fa60d770c33 service nova] Releasing lock "refresh_cache-6508e861-6f06-4ebd-be0f-22312d983306" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2034.917863] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: f87867c3-58d4-4bd6-b6ef-1608ebef6b22] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2035.018691] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e7997f-84a2-247e-937b-a06c643d0f7b, 'name': SearchDatastore_Task, 'duration_secs': 0.600778} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.018912] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2035.019177] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2035.019454] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.019606] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.019787] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2035.020054] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0491360-7dca-4b4b-a544-9b1c6d25f404 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.421101] env[63297]: DEBUG nova.compute.manager [None 
req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 427c4ff0-1bf1-4bfb-b5c6-de6659148ab1] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2035.588290] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2035.588539] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2035.589301] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3245d2d3-59e1-4f2d-b105-7224fca9b8a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.595351] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2035.595351] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ee7906-c466-5bc0-fc87-b4d1c5100a27" [ 2035.595351] env[63297]: _type = "Task" [ 2035.595351] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.602488] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ee7906-c466-5bc0-fc87-b4d1c5100a27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.924647] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 42d872d6-da12-474b-8741-1d991d507cfa] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2036.105834] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52ee7906-c466-5bc0-fc87-b4d1c5100a27, 'name': SearchDatastore_Task, 'duration_secs': 0.009184} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.106624] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b7068a0-9861-4871-ae0c-0c23828ba1ca {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.111582] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2036.111582] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d93f53-438f-35e3-2350-ef2cc5cfb91b" [ 2036.111582] env[63297]: _type = "Task" [ 2036.111582] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.119104] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d93f53-438f-35e3-2350-ef2cc5cfb91b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.428522] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 10def566-2d1f-4ea2-9df5-ebf4d77f7b48] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2036.621997] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d93f53-438f-35e3-2350-ef2cc5cfb91b, 'name': SearchDatastore_Task, 'duration_secs': 0.009881} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.622238] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2036.622492] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6508e861-6f06-4ebd-be0f-22312d983306/6508e861-6f06-4ebd-be0f-22312d983306.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2036.622739] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b062c7d-3ccc-4803-ab7d-d58fad049142 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.629111] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2036.629111] env[63297]: value = "task-1698500" [ 2036.629111] env[63297]: _type = "Task" [ 2036.629111] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.636428] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.932394] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: fda9a4b0-ca3a-4ab1-8eda-6dc6475a3c69] Instance has had 0 of 5 cleanup attempts {{(pid=63297) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2037.138679] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698500, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470823} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.138949] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6508e861-6f06-4ebd-be0f-22312d983306/6508e861-6f06-4ebd-be0f-22312d983306.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2037.139146] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2037.139390] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef0f0722-d8c0-4b53-8e36-5615909d8bce {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.145496] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2037.145496] env[63297]: value = "task-1698501" [ 2037.145496] env[63297]: _type = "Task" [ 2037.145496] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.152359] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698501, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.435982] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2037.436381] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Cleaning up deleted instances with incomplete migration {{(pid=63297) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 2037.654548] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698501, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062563} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.654810] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2037.655577] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4bbf9b-bf8a-4d70-a484-1910dc244bf6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.676720] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 6508e861-6f06-4ebd-be0f-22312d983306/6508e861-6f06-4ebd-be0f-22312d983306.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2037.676961] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59e41488-5d22-45ed-8b0d-7bca60c6b9cb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.696731] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2037.696731] env[63297]: value = "task-1698502" [ 2037.696731] env[63297]: _type = "Task" [ 2037.696731] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.704264] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698502, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.938799] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2038.207120] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698502, 'name': ReconfigVM_Task, 'duration_secs': 0.270645} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.207379] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 6508e861-6f06-4ebd-be0f-22312d983306/6508e861-6f06-4ebd-be0f-22312d983306.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2038.207973] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-291ae278-4175-4d2c-91c2-7808982858e4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.215017] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2038.215017] env[63297]: value = "task-1698503" [ 2038.215017] env[63297]: _type = "Task" [ 2038.215017] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.222345] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698503, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.725367] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698503, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.225751] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698503, 'name': Rename_Task} progress is 99%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.727931] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698503, 'name': Rename_Task, 'duration_secs': 1.142334} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.728383] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2039.728742] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9df7fc0-7bc7-4a6d-a0c8-9d88744c5d68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.735849] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2039.735849] env[63297]: value = "task-1698504" [ 2039.735849] env[63297]: _type = "Task" [ 2039.735849] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.746493] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698504, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.245654] env[63297]: DEBUG oslo_vmware.api [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698504, 'name': PowerOnVM_Task, 'duration_secs': 0.448705} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.245923] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2040.246146] env[63297]: INFO nova.compute.manager [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Took 8.75 seconds to spawn the instance on the hypervisor. 
[ 2040.246353] env[63297]: DEBUG nova.compute.manager [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2040.247132] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85371ee-0a2c-450f-91cb-a2c9c0d7903d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.441381] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2040.441591] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2040.441694] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2040.767260] env[63297]: INFO nova.compute.manager [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Took 13.89 seconds to build instance. [ 2040.979751] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2040.979912] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquired lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2040.980104] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Forcefully refreshing network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2040.980285] env[63297]: DEBUG nova.objects.instance [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lazy-loading 'info_cache' on Instance uuid 3fca16fa-0768-4ea8-87f2-b5a37898cdfa {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2041.269461] env[63297]: DEBUG oslo_concurrency.lockutils [None req-9adeef18-73df-45f2-8bb3-20084fb9b3ae tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "6508e861-6f06-4ebd-be0f-22312d983306" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.398s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.360278] env[63297]: INFO nova.compute.manager [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Rebuilding instance [ 
2041.404114] env[63297]: DEBUG nova.compute.manager [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2041.404982] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8157767f-2fa8-42e1-996e-a6eaa3c43e63 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.916533] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2041.916917] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22be4daf-9517-4f7f-bfc9-41667ec1dba3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.924224] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2041.924224] env[63297]: value = "task-1698505" [ 2041.924224] env[63297]: _type = "Task" [ 2041.924224] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.932107] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698505, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.434642] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698505, 'name': PowerOffVM_Task, 'duration_secs': 0.211707} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.434930] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2042.435170] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2042.435923] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784ab72a-7fa2-4100-86a5-79311ef8c51f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.442742] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2042.442948] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-796dd3a6-9be3-4151-8fc7-51b049ab2987 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.522580] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2042.522824] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2042.522969] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleting the datastore file [datastore1] 6508e861-6f06-4ebd-be0f-22312d983306 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2042.523233] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2fb5aeac-be91-43cd-9125-0a1335a7b64b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.529957] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2042.529957] env[63297]: value = "task-1698507" [ 2042.529957] env[63297]: _type = "Task" [ 2042.529957] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.537809] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698507, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.702384] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance_info_cache with network_info: [{"id": "48409784-eed5-4e22-940f-e406d1b3af8a", "address": "fa:16:3e:b0:17:fc", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48409784-ee", "ovs_interfaceid": "48409784-eed5-4e22-940f-e406d1b3af8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2043.039399] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698507, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218862} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.039763] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2043.039763] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2043.040012] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2043.204755] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Releasing lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2043.204962] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updated the network info_cache for instance {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2044.068452] env[63297]: DEBUG nova.virt.hardware [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2044.068767] env[63297]: DEBUG nova.virt.hardware [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2044.068838] env[63297]: DEBUG nova.virt.hardware [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2044.069018] env[63297]: DEBUG nova.virt.hardware [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 
tempest-ServerActionsTestJSON-70180264-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2044.069170] env[63297]: DEBUG nova.virt.hardware [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2044.069318] env[63297]: DEBUG nova.virt.hardware [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2044.069520] env[63297]: DEBUG nova.virt.hardware [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2044.069678] env[63297]: DEBUG nova.virt.hardware [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2044.069843] env[63297]: DEBUG nova.virt.hardware [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2044.070011] env[63297]: DEBUG nova.virt.hardware [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2044.070192] env[63297]: DEBUG nova.virt.hardware [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2044.071039] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc430650-6e4f-479e-b452-da5036df7b46 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.079222] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c59aea-4493-492e-a4d1-841ba044fc3f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.093546] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:6d:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9858162-f29d-48e7-aa88-421c45ade58b', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2044.100904] env[63297]: DEBUG oslo.service.loopingcall [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2044.101130] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2044.101323] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad779cf7-7b6a-47c9-80ca-b30199822ca6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.120109] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2044.120109] env[63297]: value = "task-1698508" [ 2044.120109] env[63297]: _type = "Task" [ 2044.120109] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.127088] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698508, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.629862] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698508, 'name': CreateVM_Task, 'duration_secs': 0.320201} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2044.630090] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2044.630754] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2044.630939] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2044.631288] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2044.631544] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff169192-6376-43d8-9174-a206b8ae593a {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.636189] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2044.636189] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5272fca9-cac5-c235-7f42-80c7a3fccffa" [ 2044.636189] env[63297]: _type = "Task" [ 2044.636189] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.643552] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5272fca9-cac5-c235-7f42-80c7a3fccffa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.146188] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5272fca9-cac5-c235-7f42-80c7a3fccffa, 'name': SearchDatastore_Task, 'duration_secs': 0.027875} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.146631] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2045.146749] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2045.146983] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2045.147148] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2045.147326] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2045.147603] env[63297]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1bcfcbaa-f92c-41a1-bd31-778e1372bbd3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.155671] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2045.155843] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2045.156536] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb91afde-72f0-4da6-8365-c1ad85fb33de {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.161282] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2045.161282] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]5204df1b-ca31-906c-1e33-5a86034fce56" [ 2045.161282] env[63297]: _type = "Task" [ 2045.161282] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.168808] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5204df1b-ca31-906c-1e33-5a86034fce56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.672153] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]5204df1b-ca31-906c-1e33-5a86034fce56, 'name': SearchDatastore_Task, 'duration_secs': 0.024046} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.672932] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bef15309-3e95-48fd-8c15-169171ef9273 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.678516] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2045.678516] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bad3a4-b8ca-cae3-1631-da4ba85c31bc" [ 2045.678516] env[63297]: _type = "Task" [ 2045.678516] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.685862] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bad3a4-b8ca-cae3-1631-da4ba85c31bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.189154] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bad3a4-b8ca-cae3-1631-da4ba85c31bc, 'name': SearchDatastore_Task, 'duration_secs': 0.027246} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.189543] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2046.189679] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6508e861-6f06-4ebd-be0f-22312d983306/6508e861-6f06-4ebd-be0f-22312d983306.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2046.189945] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e3a4157-db33-4f6b-b130-8bc973378a98 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.197140] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2046.197140] env[63297]: value = "task-1698509" [ 2046.197140] env[63297]: _type = "Task" [ 2046.197140] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.204629] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698509, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.707023] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698509, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462068} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.707230] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 6508e861-6f06-4ebd-be0f-22312d983306/6508e861-6f06-4ebd-be0f-22312d983306.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2046.707433] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2046.707704] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bac14b83-c024-4875-93fa-678acdd8b728 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.713838] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2046.713838] env[63297]: value = "task-1698510" [ 2046.713838] env[63297]: _type = "Task" [ 2046.713838] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.720863] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698510, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.223300] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698510, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076185} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.223721] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2047.224329] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f343f9-2b49-4b54-9fdd-a316ec79be8e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.246405] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 6508e861-6f06-4ebd-be0f-22312d983306/6508e861-6f06-4ebd-be0f-22312d983306.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2047.246658] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-839dc9b1-d484-419c-a09b-768184f1a022 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.265557] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2047.265557] env[63297]: value = "task-1698511" [ 2047.265557] env[63297]: _type = "Task" [ 2047.265557] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.272844] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698511, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.774875] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698511, 'name': ReconfigVM_Task, 'duration_secs': 0.280414} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.775115] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 6508e861-6f06-4ebd-be0f-22312d983306/6508e861-6f06-4ebd-be0f-22312d983306.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2047.775728] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16b56bdf-1885-44a8-91eb-1bdf3c7cdb56 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.781771] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2047.781771] env[63297]: value = "task-1698512" [ 2047.781771] env[63297]: _type = "Task" [ 2047.781771] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.789371] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698512, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.291901] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698512, 'name': Rename_Task, 'duration_secs': 0.159189} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.292313] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2048.292453] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8efd3274-18dd-44ed-b2bc-02f1f9ed8464 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.298739] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2048.298739] env[63297]: value = "task-1698513" [ 2048.298739] env[63297]: _type = "Task" [ 2048.298739] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.305874] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698513, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.810020] env[63297]: DEBUG oslo_vmware.api [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698513, 'name': PowerOnVM_Task, 'duration_secs': 0.453045} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.810308] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2048.810519] env[63297]: DEBUG nova.compute.manager [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2048.811319] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86fc3b9-17bc-429d-841d-f0ddf3147fc6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.328472] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2049.328715] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.328963] env[63297]: DEBUG nova.objects.instance [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63297) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2050.337994] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7af1bc83-13c7-4ecf-91e4-d62d2788157f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.676976] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "6508e861-6f06-4ebd-be0f-22312d983306" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.677252] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "6508e861-6f06-4ebd-be0f-22312d983306" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.677474] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "6508e861-6f06-4ebd-be0f-22312d983306-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.677659] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "6508e861-6f06-4ebd-be0f-22312d983306-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.677839] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "6508e861-6f06-4ebd-be0f-22312d983306-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.680109] env[63297]: INFO nova.compute.manager [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Terminating instance [ 2050.681833] env[63297]: DEBUG nova.compute.manager [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2050.682044] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2050.682881] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee60a79b-8e59-4025-93e9-81ef22a2935d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.691340] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2050.691566] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74d32339-6350-4f1a-808d-f3c76a89ca5f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.698324] env[63297]: DEBUG oslo_vmware.api [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2050.698324] env[63297]: value = "task-1698514" [ 2050.698324] env[63297]: _type = "Task" [ 2050.698324] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.708039] env[63297]: DEBUG oslo_vmware.api [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698514, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.209024] env[63297]: DEBUG oslo_vmware.api [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698514, 'name': PowerOffVM_Task, 'duration_secs': 0.194212} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.209289] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2051.209455] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2051.209686] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a626db9c-0cb9-4665-8627-28ebc17e6cf8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.279739] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2051.279968] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2051.280204] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleting the datastore file [datastore1] 6508e861-6f06-4ebd-be0f-22312d983306 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2051.280459] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f912932-8ea4-467d-936a-c878c7d39124 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.287232] env[63297]: DEBUG oslo_vmware.api [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2051.287232] env[63297]: value = "task-1698516" [ 2051.287232] env[63297]: _type = "Task" [ 2051.287232] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.295203] env[63297]: DEBUG oslo_vmware.api [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698516, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.797145] env[63297]: DEBUG oslo_vmware.api [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698516, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18908} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.797439] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2051.797653] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2051.797864] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2051.798067] env[63297]: INFO nova.compute.manager [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2051.798311] env[63297]: DEBUG oslo.service.loopingcall [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2051.798501] env[63297]: DEBUG nova.compute.manager [-] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2051.798595] env[63297]: DEBUG nova.network.neutron [-] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2052.037219] env[63297]: DEBUG nova.compute.manager [req-025f52fe-1c36-4cae-afed-92374b603ea1 req-9c7bb7bd-9093-40f1-b1df-76c6f08814d1 service nova] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Received event network-vif-deleted-b9858162-f29d-48e7-aa88-421c45ade58b {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2052.037295] env[63297]: INFO nova.compute.manager [req-025f52fe-1c36-4cae-afed-92374b603ea1 req-9c7bb7bd-9093-40f1-b1df-76c6f08814d1 service nova] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Neutron deleted interface b9858162-f29d-48e7-aa88-421c45ade58b; detaching it from the instance and deleting it from the info cache [ 2052.037453] env[63297]: DEBUG nova.network.neutron [req-025f52fe-1c36-4cae-afed-92374b603ea1 req-9c7bb7bd-9093-40f1-b1df-76c6f08814d1 service nova] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2052.516912] env[63297]: DEBUG nova.network.neutron [-] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2052.539846] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60aed084-7b8e-45e7-b6ae-bba925f9d300 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.549789] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7506ab4a-b414-45e7-824e-29489e83fccb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.574103] env[63297]: DEBUG nova.compute.manager [req-025f52fe-1c36-4cae-afed-92374b603ea1 req-9c7bb7bd-9093-40f1-b1df-76c6f08814d1 service nova] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Detach interface failed, port_id=b9858162-f29d-48e7-aa88-421c45ade58b, reason: Instance 6508e861-6f06-4ebd-be0f-22312d983306 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2053.019305] env[63297]: INFO nova.compute.manager [-] [instance: 6508e861-6f06-4ebd-be0f-22312d983306] Took 1.22 seconds to deallocate network for instance. 
[ 2053.526366] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2053.526694] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2053.526973] env[63297]: DEBUG nova.objects.instance [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lazy-loading 'resources' on Instance uuid 6508e861-6f06-4ebd-be0f-22312d983306 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2054.071744] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4f7534-81d2-46e5-835c-93046245e3e8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.079429] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4638bc9b-f39a-43ad-a928-93cebf5f32b3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.110043] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8b8bce-981a-4fb4-89d2-d39d2c3bd59d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.117264] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756111c9-4d2d-4d90-9aee-6ceb5bc5a9e3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.130087] env[63297]: DEBUG nova.compute.provider_tree [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2054.632917] env[63297]: DEBUG nova.scheduler.client.report [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2055.138209] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 
tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.611s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2055.160657] env[63297]: INFO nova.scheduler.client.report [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleted allocations for instance 6508e861-6f06-4ebd-be0f-22312d983306 [ 2055.669367] env[63297]: DEBUG oslo_concurrency.lockutils [None req-5ff3f5c1-d9d9-493b-96fb-72084b45d5df tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "6508e861-6f06-4ebd-be0f-22312d983306" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.992s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.331972] env[63297]: DEBUG nova.compute.manager [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Stashing vm_state: active {{(pid=63297) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2057.848691] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.848947] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.353740] env[63297]: INFO nova.compute.claims [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2058.860464] env[63297]: INFO nova.compute.resource_tracker [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating resource usage from migration 1f4e2e4e-f778-45f6-b2d4-5db5ea759842 [ 2058.896693] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baec2baf-261d-4cfa-8de9-7e5907128a3e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.904698] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f248090c-8c9e-4ea8-9414-56b7580c05b7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.933535] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f444af-0be7-419a-82d6-f05694be71f3 {{(pid=63297) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.940538] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03fca0a-19d8-4db2-bcb3-5596860f0c3b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.953225] env[63297]: DEBUG nova.compute.provider_tree [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2059.456281] env[63297]: DEBUG nova.scheduler.client.report [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2059.962115] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.113s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2059.962290] env[63297]: INFO nova.compute.manager [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Migrating [ 2060.477332] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2060.477644] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.477787] env[63297]: DEBUG nova.network.neutron [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2061.194300] env[63297]: DEBUG nova.network.neutron [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating 
instance_info_cache with network_info: [{"id": "48409784-eed5-4e22-940f-e406d1b3af8a", "address": "fa:16:3e:b0:17:fc", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48409784-ee", "ovs_interfaceid": "48409784-eed5-4e22-940f-e406d1b3af8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2061.697501] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.211768] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8678e0-5cb5-4fd6-bb69-c42c28145450 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.230309] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance '3fca16fa-0768-4ea8-87f2-b5a37898cdfa' progress to 0 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2063.736356] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2063.736694] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0d2653a-1791-45ae-90ed-faa10e21dc61 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.744558] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2063.744558] env[63297]: value = "task-1698517" [ 2063.744558] env[63297]: _type = "Task" [ 2063.744558] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.752742] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698517, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.254369] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698517, 'name': PowerOffVM_Task, 'duration_secs': 0.169583} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.254780] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2064.254883] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance '3fca16fa-0768-4ea8-87f2-b5a37898cdfa' progress to 17 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2064.761127] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2064.761430] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2064.761527] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2064.761711] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2064.761864] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 
tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2064.762023] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2064.762248] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2064.762412] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2064.762581] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2064.762743] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2064.762915] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2064.767918] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af010d07-d0ac-41b4-ad11-6c2140fc0aa7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.783398] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2064.783398] env[63297]: value = "task-1698518" [ 2064.783398] env[63297]: _type = "Task" [ 2064.783398] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.790797] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698518, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.293751] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698518, 'name': ReconfigVM_Task, 'duration_secs': 0.154509} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.294192] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance '3fca16fa-0768-4ea8-87f2-b5a37898cdfa' progress to 33 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2065.800230] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2065.800510] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2065.800620] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2065.800815] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2065.800964] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2065.801132] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2065.801335] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 
tempest-ServerActionsTestJSON-70180264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2065.801495] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2065.801664] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2065.801827] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2065.801998] env[63297]: DEBUG nova.virt.hardware [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2065.807281] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2065.807583] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a1f943b-2f2c-4765-a0bf-bdd50d72f9e6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.826677] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2065.826677] env[63297]: value = "task-1698519" [ 2065.826677] env[63297]: _type = "Task" [ 2065.826677] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.833917] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698519, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.337215] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698519, 'name': ReconfigVM_Task, 'duration_secs': 0.157805} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.337609] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2066.338246] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea37a0a5-34f7-486c-b233-9cc39a241458 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.360397] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 3fca16fa-0768-4ea8-87f2-b5a37898cdfa/3fca16fa-0768-4ea8-87f2-b5a37898cdfa.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2066.360935] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-092f09fd-d330-40ff-b0fb-91688853a4a7 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.378579] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2066.378579] env[63297]: value = "task-1698520" [ 2066.378579] env[63297]: _type = "Task" [ 2066.378579] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.385848] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698520, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.887655] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698520, 'name': ReconfigVM_Task, 'duration_secs': 0.237205} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.887976] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 3fca16fa-0768-4ea8-87f2-b5a37898cdfa/3fca16fa-0768-4ea8-87f2-b5a37898cdfa.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2066.888310] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance '3fca16fa-0768-4ea8-87f2-b5a37898cdfa' progress to 50 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2067.394886] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc7047d-953b-4438-854f-4f6b2d7273bc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.413595] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030b7e22-8424-41a9-925a-24f5275e6ab9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.430301] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance '3fca16fa-0768-4ea8-87f2-b5a37898cdfa' progress to 67 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2067.994672] env[63297]: DEBUG nova.network.neutron [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Port 48409784-eed5-4e22-940f-e406d1b3af8a binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2069.017768] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.017768] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.017768] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock 
"3fca16fa-0768-4ea8-87f2-b5a37898cdfa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.075200] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2070.075439] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2070.075589] env[63297]: DEBUG nova.network.neutron [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2070.772570] env[63297]: DEBUG nova.network.neutron [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance_info_cache with network_info: [{"id": "48409784-eed5-4e22-940f-e406d1b3af8a", "address": "fa:16:3e:b0:17:fc", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48409784-ee", "ovs_interfaceid": "48409784-eed5-4e22-940f-e406d1b3af8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2071.275858] env[63297]: DEBUG oslo_concurrency.lockutils [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2071.800482] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5e392f-3ef5-4ca1-8de3-c44c9942c38f 
{{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.819443] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3580154-d9a5-48d5-8927-4149be7f0dbb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.825908] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance '3fca16fa-0768-4ea8-87f2-b5a37898cdfa' progress to 83 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2072.332270] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2072.332680] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d8949c1-9eb3-456c-a95b-978902301087 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.339926] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2072.339926] env[63297]: value = "task-1698521" [ 2072.339926] env[63297]: _type = "Task" [ 2072.339926] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.347571] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698521, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.850193] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698521, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.350460] env[63297]: DEBUG oslo_vmware.api [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698521, 'name': PowerOnVM_Task, 'duration_secs': 0.585785} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.350718] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2073.350896] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-b835b12d-8eb7-49e7-9256-e317be2e4e57 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance '3fca16fa-0768-4ea8-87f2-b5a37898cdfa' progress to 100 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2075.519344] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.519703] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.519844] env[63297]: DEBUG nova.compute.manager [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Going to confirm migration 9 {{(pid=63297) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2076.064782] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.064980] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2076.065177] env[63297]: DEBUG nova.network.neutron [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2076.065378] env[63297]: DEBUG nova.objects.instance [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lazy-loading 'info_cache' on Instance uuid 3fca16fa-0768-4ea8-87f2-b5a37898cdfa {{(pid=63297) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 2077.267877] env[63297]: DEBUG nova.network.neutron [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance_info_cache with network_info: [{"id": "48409784-eed5-4e22-940f-e406d1b3af8a", "address": "fa:16:3e:b0:17:fc", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48409784-ee", "ovs_interfaceid": "48409784-eed5-4e22-940f-e406d1b3af8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.770752] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-3fca16fa-0768-4ea8-87f2-b5a37898cdfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.771025] env[63297]: DEBUG nova.objects.instance [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lazy-loading 'migration_context' on Instance uuid 3fca16fa-0768-4ea8-87f2-b5a37898cdfa {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2078.274558] env[63297]: DEBUG nova.objects.base [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Object Instance<3fca16fa-0768-4ea8-87f2-b5a37898cdfa> lazy-loaded attributes: info_cache,migration_context {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2078.275504] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d821f2-af96-47c8-90d3-d9b0096706b1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.294734] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-467bf63b-af58-485b-b07f-8e076e8974e5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.300232] env[63297]: DEBUG oslo_vmware.api [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: 
(returnval){ [ 2078.300232] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d2c2c-9db8-1d23-3c8f-ee889581f516" [ 2078.300232] env[63297]: _type = "Task" [ 2078.300232] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.307651] env[63297]: DEBUG oslo_vmware.api [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d2c2c-9db8-1d23-3c8f-ee889581f516, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.810964] env[63297]: DEBUG oslo_vmware.api [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]529d2c2c-9db8-1d23-3c8f-ee889581f516, 'name': SearchDatastore_Task, 'duration_secs': 0.009571} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.811274] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.811533] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2079.379088] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb191f5-742f-499e-8b08-e19536542b22 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.386675] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93109c28-4d42-4930-a072-c3302d9f3d73 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.416842] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6719c75a-3768-454a-92a0-ba03c267e045 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.424550] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ab25f5-fb90-4e7e-8340-55c70de695f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.437536] env[63297]: DEBUG nova.compute.provider_tree [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2079.957769] env[63297]: ERROR nova.scheduler.client.report [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [req-03889e68-0c31-466a-9186-e09f747536e9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-03889e68-0c31-466a-9186-e09f747536e9"}]} [ 2079.973507] env[63297]: DEBUG nova.scheduler.client.report [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2079.985555] env[63297]: DEBUG nova.scheduler.client.report [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2079.985879] env[63297]: DEBUG nova.compute.provider_tree [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2079.995755] env[63297]: DEBUG nova.scheduler.client.report [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2080.013029] env[63297]: DEBUG nova.scheduler.client.report [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2080.045822] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d4de9b-a07a-43b1-8728-38985db44038 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.053128] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6289ba-a872-45b9-8cc8-1400ff797bea {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.082165] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b127d945-0e09-45e3-961b-934aee761941 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.089066] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7582f6e5-bdad-4db6-a0fa-f12128f81903 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.101651] env[63297]: DEBUG nova.compute.provider_tree [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2080.630252] env[63297]: DEBUG nova.scheduler.client.report [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 184 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2080.630532] env[63297]: DEBUG nova.compute.provider_tree [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 184 to 185 during operation: update_inventory {{(pid=63297) 
_update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2080.630714] env[63297]: DEBUG nova.compute.provider_tree [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2080.665339] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.642948] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.831s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.665108] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2082.207650] env[63297]: INFO nova.scheduler.client.report [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleted allocation for migration 1f4e2e4e-f778-45f6-b2d4-5db5ea759842 [ 2082.665147] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2082.713630] env[63297]: DEBUG oslo_concurrency.lockutils [None req-72ac8a44-49e5-45f3-a107-6870605690ee tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.194s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2083.660353] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2083.660596] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" 
acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2083.660807] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2083.661022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2083.661179] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2083.662847] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.664862] env[63297]: INFO nova.compute.manager [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Terminating instance [ 2083.666211] env[63297]: DEBUG nova.compute.manager [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Start destroying the instance on the hypervisor. 
{{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2083.666489] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2083.667514] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6f0a8b-2185-454c-b716-cf39d01ac120 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.674872] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2083.675602] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a807747e-6da9-47a3-9802-f28b9df067e6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.681941] env[63297]: DEBUG oslo_vmware.api [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2083.681941] env[63297]: value = "task-1698522" [ 2083.681941] env[63297]: _type = "Task" [ 2083.681941] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.689590] env[63297]: DEBUG oslo_vmware.api [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698522, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.168866] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2084.169133] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2084.191152] env[63297]: DEBUG oslo_vmware.api [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698522, 'name': PowerOffVM_Task, 'duration_secs': 0.170384} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.191415] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2084.191582] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2084.191828] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d82becf-7170-4b38-ade1-730178a8077b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.265415] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2084.265649] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2084.265832] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleting the datastore file [datastore1] 3fca16fa-0768-4ea8-87f2-b5a37898cdfa {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2084.266169] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c466df2-3f83-479c-a307-1ad0a5a2cbf6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.274953] env[63297]: DEBUG oslo_vmware.api [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2084.274953] env[63297]: value = "task-1698524" [ 2084.274953] env[63297]: _type = "Task" [ 2084.274953] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.282447] env[63297]: DEBUG oslo_vmware.api [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698524, 'name': DeleteDatastoreFile_Task} progress is 0%. 
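The PowerOffVM_Task and DeleteDatastoreFile_Task records follow the same shape: the driver submits a vCenter task, then polls it, logging progress, until it reports success or failure. A simplified, generic version of that polling loop (the state callable is a stand-in, not the oslo.vmware API):

    import time

    def wait_for_task(get_state, interval=0.5, timeout=300.0):
        """Poll get_state() until the task succeeds, fails, or times out.

        get_state is assumed to return a (state, progress, error) tuple,
        e.g. ("running", 40, None) or ("success", 100, None).
        """
        deadline = time.monotonic() + timeout
        while True:
            state, progress, error = get_state()
            print(f"progress is {progress}%")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(f"task failed: {error}")
            if time.monotonic() >= deadline:
                raise TimeoutError("task did not complete in time")
            time.sleep(interval)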
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.784682] env[63297]: DEBUG oslo_vmware.api [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698524, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136376} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.785706] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2084.785706] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2084.785875] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2084.785979] env[63297]: INFO nova.compute.manager [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2084.786268] env[63297]: DEBUG oslo.service.loopingcall [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2084.786467] env[63297]: DEBUG nova.compute.manager [-] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2084.786563] env[63297]: DEBUG nova.network.neutron [-] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2085.168349] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2085.213227] env[63297]: DEBUG nova.compute.manager [req-ff2b517d-8b33-4d91-b19f-ac59c279d69d req-132d5edb-f3db-4b0f-baf4-2b882cbc90f3 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Received event network-vif-deleted-48409784-eed5-4e22-940f-e406d1b3af8a {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2085.213438] env[63297]: INFO nova.compute.manager [req-ff2b517d-8b33-4d91-b19f-ac59c279d69d req-132d5edb-f3db-4b0f-baf4-2b882cbc90f3 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Neutron deleted interface 48409784-eed5-4e22-940f-e406d1b3af8a; detaching it from the instance and deleting it from the info cache [ 2085.213611] env[63297]: DEBUG nova.network.neutron [req-ff2b517d-8b33-4d91-b19f-ac59c279d69d req-132d5edb-f3db-4b0f-baf4-2b882cbc90f3 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2085.665061] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2085.686673] env[63297]: DEBUG nova.network.neutron [-] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2085.715780] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3001b9fb-44fb-4f84-ad89-8b732fde5437 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.725564] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05bbdfc0-de3a-4b2c-8772-8b8b99e41f82 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.748122] env[63297]: DEBUG nova.compute.manager [req-ff2b517d-8b33-4d91-b19f-ac59c279d69d req-132d5edb-f3db-4b0f-baf4-2b882cbc90f3 service nova] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Detach interface failed, port_id=48409784-eed5-4e22-940f-e406d1b3af8a, reason: Instance 3fca16fa-0768-4ea8-87f2-b5a37898cdfa could not be found. 
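The network deallocation above is wrapped in a "waiting for function ... to return" looping call so transient Neutron errors are retried rather than aborting the teardown. A generic retry wrapper that illustrates the idea (this is not the oslo.service implementation):

    import time

    def call_with_retries(fn, attempts=3, delay=1.0, backoff=2.0):
        """Call fn(), retrying with exponential backoff on exceptions."""
        for attempt in range(1, attempts + 1):
            try:
                return fn()
            except Exception as exc:
                if attempt == attempts:
                    raise
                print(f"attempt {attempt} failed ({exc}); retrying in {delay:.1f}s")
                time.sleep(delay)
                delay *= backoff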
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2086.189660] env[63297]: INFO nova.compute.manager [-] [instance: 3fca16fa-0768-4ea8-87f2-b5a37898cdfa] Took 1.40 seconds to deallocate network for instance. [ 2086.665695] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2086.666011] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2086.695851] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2086.696152] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.696344] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.717513] env[63297]: INFO nova.scheduler.client.report [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleted allocations for instance 3fca16fa-0768-4ea8-87f2-b5a37898cdfa [ 2087.169494] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2087.169779] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.169927] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.170144] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally 
available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2087.171048] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70a29a9-9405-42df-a598-c61d746a2a12 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.179091] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca54275-19a4-4ef7-9c64-316690e07e29 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.192826] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103fe0c6-d536-4e45-993c-908b73e50867 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.199063] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9cbd70-505d-4f63-911a-f89917e471dd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.230808] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181211MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2087.230979] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2087.231198] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.232800] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2d534f37-80a4-4cea-b421-9e295f577ba6 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "3fca16fa-0768-4ea8-87f2-b5a37898cdfa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.572s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.573179] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "538ae7a3-6e53-48ed-9b79-169175806dc4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2088.573179] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "538ae7a3-6e53-48ed-9b79-169175806dc4" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2088.760056] env[63297]: INFO nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 538ae7a3-6e53-48ed-9b79-169175806dc4 has allocations against this compute host but is not found in the database. [ 2088.760056] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2088.760056] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2088.788772] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19181c13-9f64-4ab5-8ea2-0c0088048a17 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.796114] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a9662f-ffbb-4352-97d4-5504ac25255b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.826648] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945f9902-3743-42ba-9b7e-b6d37ef7d7ab {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.833688] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6c9d09-8010-44d4-8044-30ec6b62af8d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.846571] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2089.074479] env[63297]: DEBUG nova.compute.manager [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2089.349955] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2089.595141] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.854957] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2089.855188] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.624s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.855451] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.260s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.857048] env[63297]: INFO nova.compute.claims [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2090.892155] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66b35f3-424a-4a06-98f1-74b64a1151fc {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.900074] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed736ef1-fde8-4ede-a0bd-446a830d555c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.930373] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8c8517-81d0-409d-aa92-674eae87967d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.937527] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a16deb6b-5d9f-400a-b759-48640e07f09f 
{{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.950490] env[63297]: DEBUG nova.compute.provider_tree [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2091.453556] env[63297]: DEBUG nova.scheduler.client.report [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2091.859407] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2091.859619] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2091.958664] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.103s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2091.959219] env[63297]: DEBUG nova.compute.manager [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2092.361997] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2092.464044] env[63297]: DEBUG nova.compute.utils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2092.465432] env[63297]: DEBUG nova.compute.manager [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Allocating IP information in the background. 
{{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2092.465605] env[63297]: DEBUG nova.network.neutron [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2092.515019] env[63297]: DEBUG nova.policy [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'acb339bdff424582a2aad0578cc255eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '612e80df32dc4fb39e2fcf28c0c0a80b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 2092.826064] env[63297]: DEBUG nova.network.neutron [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Successfully created port: 41d0d073-3859-4e39-9e68-a1d33c810937 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2092.970488] env[63297]: DEBUG nova.compute.manager [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2093.980360] env[63297]: DEBUG nova.compute.manager [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Start spawning the instance on the hypervisor. 
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2094.009607] env[63297]: DEBUG nova.virt.hardware [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2094.009844] env[63297]: DEBUG nova.virt.hardware [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2094.010009] env[63297]: DEBUG nova.virt.hardware [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2094.010220] env[63297]: DEBUG nova.virt.hardware [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2094.010362] env[63297]: DEBUG nova.virt.hardware [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2094.010508] env[63297]: DEBUG nova.virt.hardware [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2094.010717] env[63297]: DEBUG nova.virt.hardware [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2094.010876] env[63297]: DEBUG nova.virt.hardware [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2094.011064] env[63297]: DEBUG nova.virt.hardware [None 
req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2094.011239] env[63297]: DEBUG nova.virt.hardware [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2094.011410] env[63297]: DEBUG nova.virt.hardware [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2094.012318] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327c461a-ead0-4789-a813-5d7674d02f9b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.021250] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93da3ff-e752-4cdb-8cec-a32b82225a41 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.239230] env[63297]: DEBUG nova.compute.manager [req-3c8f42b1-d15f-4040-a82b-e43ca8488f70 req-c0a117e9-839f-429d-ad4a-cdcc07039f63 service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Received event network-vif-plugged-41d0d073-3859-4e39-9e68-a1d33c810937 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2094.239426] env[63297]: DEBUG oslo_concurrency.lockutils [req-3c8f42b1-d15f-4040-a82b-e43ca8488f70 req-c0a117e9-839f-429d-ad4a-cdcc07039f63 service nova] Acquiring lock "538ae7a3-6e53-48ed-9b79-169175806dc4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.239640] env[63297]: DEBUG oslo_concurrency.lockutils [req-3c8f42b1-d15f-4040-a82b-e43ca8488f70 req-c0a117e9-839f-429d-ad4a-cdcc07039f63 service nova] Lock "538ae7a3-6e53-48ed-9b79-169175806dc4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.240053] env[63297]: DEBUG oslo_concurrency.lockutils [req-3c8f42b1-d15f-4040-a82b-e43ca8488f70 req-c0a117e9-839f-429d-ad4a-cdcc07039f63 service nova] Lock "538ae7a3-6e53-48ed-9b79-169175806dc4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2094.240249] env[63297]: DEBUG nova.compute.manager [req-3c8f42b1-d15f-4040-a82b-e43ca8488f70 req-c0a117e9-839f-429d-ad4a-cdcc07039f63 service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] No waiting events found dispatching network-vif-plugged-41d0d073-3859-4e39-9e68-a1d33c810937 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2094.240419] env[63297]: WARNING nova.compute.manager [req-3c8f42b1-d15f-4040-a82b-e43ca8488f70 
req-c0a117e9-839f-429d-ad4a-cdcc07039f63 service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Received unexpected event network-vif-plugged-41d0d073-3859-4e39-9e68-a1d33c810937 for instance with vm_state building and task_state spawning. [ 2094.327835] env[63297]: DEBUG nova.network.neutron [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Successfully updated port: 41d0d073-3859-4e39-9e68-a1d33c810937 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2094.831955] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2094.832168] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2094.832364] env[63297]: DEBUG nova.network.neutron [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2095.366331] env[63297]: DEBUG nova.network.neutron [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Instance cache missing network info. 
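The nova.virt.hardware records a few lines up derive the possible CPU topologies for the flavor's single vCPU: with no flavor or image limits set, every (sockets, cores, threads) combination whose product equals the vCPU count is a candidate, which for one vCPU leaves only 1:1:1. A simplified enumeration consistent with those records (the real Nova code applies further constraints and preference sorting):

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Yield (sockets, cores, threads) whose product is exactly vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            rest = vcpus // sockets
            for cores in range(1, min(rest, max_cores) + 1):
                if rest % cores:
                    continue
                threads = rest // cores
                if threads <= max_threads:
                    yield (sockets, cores, threads)

    print(list(possible_topologies(1, 65536, 65536, 65536)))   # [(1, 1, 1)], as in the log
    print(list(possible_topologies(4, 65536, 65536, 65536)))   # (1,1,4), (1,2,2), ... (4,1,1)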
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2095.490132] env[63297]: DEBUG nova.network.neutron [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance_info_cache with network_info: [{"id": "41d0d073-3859-4e39-9e68-a1d33c810937", "address": "fa:16:3e:81:23:7e", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d0d073-38", "ovs_interfaceid": "41d0d073-3859-4e39-9e68-a1d33c810937", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2095.993265] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2095.993546] env[63297]: DEBUG nova.compute.manager [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Instance network_info: |[{"id": "41d0d073-3859-4e39-9e68-a1d33c810937", "address": "fa:16:3e:81:23:7e", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d0d073-38", "ovs_interfaceid": "41d0d073-3859-4e39-9e68-a1d33c810937", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2095.993996] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:23:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41d0d073-3859-4e39-9e68-a1d33c810937', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2096.001612] env[63297]: DEBUG oslo.service.loopingcall [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2096.001848] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2096.002113] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c26ccecc-6b5e-4255-8c05-2f9dbf71cf81 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.022973] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2096.022973] env[63297]: value = "task-1698525" [ 2096.022973] env[63297]: _type = "Task" [ 2096.022973] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.031247] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698525, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.265161] env[63297]: DEBUG nova.compute.manager [req-ece035b4-be35-47db-94b0-55524de48756 req-da8151b7-621c-457d-a521-2312f0f6b8af service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Received event network-changed-41d0d073-3859-4e39-9e68-a1d33c810937 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2096.265364] env[63297]: DEBUG nova.compute.manager [req-ece035b4-be35-47db-94b0-55524de48756 req-da8151b7-621c-457d-a521-2312f0f6b8af service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Refreshing instance network info cache due to event network-changed-41d0d073-3859-4e39-9e68-a1d33c810937. 
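The two blobs above are the Neutron network_info entry for port 41d0d073-3859-4e39-9e68-a1d33c810937 and the condensed "Instance VIF info" the vmwareapi vmops code builds from it: the MAC address, the iface_id, the vif_model, and an OpaqueNetwork reference taken from the nsx-logical-switch-id. A minimal sketch of that mapping using only fields visible in the log (the function name is illustrative):

    def vif_info_from_network_info(vif, vif_model="vmxnet3"):
        """Condense one Neutron network_info entry into the fields
        needed to plug the NIC on the VMware side."""
        details = vif.get("details", {})
        return {
            "network_name": vif["network"]["bridge"],      # e.g. "br-int"
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details.get("nsx-logical-switch-id"),
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": vif_model,
        }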
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2096.265565] env[63297]: DEBUG oslo_concurrency.lockutils [req-ece035b4-be35-47db-94b0-55524de48756 req-da8151b7-621c-457d-a521-2312f0f6b8af service nova] Acquiring lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2096.265755] env[63297]: DEBUG oslo_concurrency.lockutils [req-ece035b4-be35-47db-94b0-55524de48756 req-da8151b7-621c-457d-a521-2312f0f6b8af service nova] Acquired lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2096.265931] env[63297]: DEBUG nova.network.neutron [req-ece035b4-be35-47db-94b0-55524de48756 req-da8151b7-621c-457d-a521-2312f0f6b8af service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Refreshing network info cache for port 41d0d073-3859-4e39-9e68-a1d33c810937 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2096.533048] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698525, 'name': CreateVM_Task, 'duration_secs': 0.321828} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.533402] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2096.533850] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2096.534021] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2096.534342] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2096.534591] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68ff0aa6-7acd-449d-8df8-dfb344bbda0e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.539624] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2096.539624] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fdee3e-c21e-df75-5b86-56f370e05954" [ 2096.539624] env[63297]: _type = "Task" [ 2096.539624] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.546962] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fdee3e-c21e-df75-5b86-56f370e05954, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.941663] env[63297]: DEBUG nova.network.neutron [req-ece035b4-be35-47db-94b0-55524de48756 req-da8151b7-621c-457d-a521-2312f0f6b8af service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updated VIF entry in instance network info cache for port 41d0d073-3859-4e39-9e68-a1d33c810937. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2096.942040] env[63297]: DEBUG nova.network.neutron [req-ece035b4-be35-47db-94b0-55524de48756 req-da8151b7-621c-457d-a521-2312f0f6b8af service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance_info_cache with network_info: [{"id": "41d0d073-3859-4e39-9e68-a1d33c810937", "address": "fa:16:3e:81:23:7e", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d0d073-38", "ovs_interfaceid": "41d0d073-3859-4e39-9e68-a1d33c810937", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2097.049738] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52fdee3e-c21e-df75-5b86-56f370e05954, 'name': SearchDatastore_Task, 'duration_secs': 0.008932} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.049973] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2097.050218] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2097.050441] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2097.050584] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2097.050761] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2097.051013] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1a38bc1-4ed0-4a25-bc69-5926f2793a59 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.058873] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2097.059058] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2097.059708] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c17550fe-81e7-4872-b076-d33eeb7327b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.064427] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2097.064427] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b0a668-9c9f-0889-ef3c-cfadf8c204ad" [ 2097.064427] env[63297]: _type = "Task" [ 2097.064427] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.071439] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b0a668-9c9f-0889-ef3c-cfadf8c204ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.444290] env[63297]: DEBUG oslo_concurrency.lockutils [req-ece035b4-be35-47db-94b0-55524de48756 req-da8151b7-621c-457d-a521-2312f0f6b8af service nova] Releasing lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2097.574952] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52b0a668-9c9f-0889-ef3c-cfadf8c204ad, 'name': SearchDatastore_Task, 'duration_secs': 0.008074} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.575747] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-904353e2-657d-4b47-b3d2-5115bbc3586b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.581166] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2097.581166] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52708ba6-6143-1064-f80c-661d9ab07168" [ 2097.581166] env[63297]: _type = "Task" [ 2097.581166] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.590019] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52708ba6-6143-1064-f80c-661d9ab07168, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.091694] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52708ba6-6143-1064-f80c-661d9ab07168, 'name': SearchDatastore_Task, 'duration_secs': 0.009637} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.091947] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2098.092207] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 538ae7a3-6e53-48ed-9b79-169175806dc4/538ae7a3-6e53-48ed-9b79-169175806dc4.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2098.092456] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d41dbf2-9071-4552-b2c2-31aff5ed72fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.099308] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2098.099308] env[63297]: value = "task-1698526" [ 2098.099308] env[63297]: _type = "Task" [ 2098.099308] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.106352] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698526, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.609477] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698526, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.430946} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.609795] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 538ae7a3-6e53-48ed-9b79-169175806dc4/538ae7a3-6e53-48ed-9b79-169175806dc4.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2098.609926] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2098.610181] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4fe995b2-87ec-4dff-be9b-43f64ea8793c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.617141] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2098.617141] env[63297]: value = "task-1698527" [ 2098.617141] env[63297]: _type = "Task" [ 2098.617141] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.624239] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698527, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.126463] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698527, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.054948} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.126734] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2099.127542] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b62926-1197-4705-8f66-d39ead1a1298 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.149203] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 538ae7a3-6e53-48ed-9b79-169175806dc4/538ae7a3-6e53-48ed-9b79-169175806dc4.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2099.149452] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfef75e6-d761-4130-9d84-9de2293532a9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.168339] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2099.168339] env[63297]: value = "task-1698528" [ 2099.168339] env[63297]: _type = "Task" [ 2099.168339] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.178475] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698528, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.679498] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698528, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.180536] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698528, 'name': ReconfigVM_Task, 'duration_secs': 0.763423} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.180764] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 538ae7a3-6e53-48ed-9b79-169175806dc4/538ae7a3-6e53-48ed-9b79-169175806dc4.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2100.181390] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7b3327f-edc9-4089-a042-7d80e5ac2301 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.187516] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2100.187516] env[63297]: value = "task-1698529" [ 2100.187516] env[63297]: _type = "Task" [ 2100.187516] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.194632] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698529, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.697352] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698529, 'name': Rename_Task, 'duration_secs': 0.134763} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.697731] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2100.697864] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53f2b3cf-c5d3-44af-a0cc-ff9a00e899ff {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.704274] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2100.704274] env[63297]: value = "task-1698530" [ 2100.704274] env[63297]: _type = "Task" [ 2100.704274] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.711142] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698530, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.215384] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698530, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.718893] env[63297]: DEBUG oslo_vmware.api [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698530, 'name': PowerOnVM_Task, 'duration_secs': 0.62587} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.719288] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2101.719603] env[63297]: INFO nova.compute.manager [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Took 7.74 seconds to spawn the instance on the hypervisor. [ 2101.719902] env[63297]: DEBUG nova.compute.manager [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2101.721054] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1082ec02-355b-48fb-b4b8-d2ee7027acd2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.244961] env[63297]: INFO nova.compute.manager [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Took 12.67 seconds to build instance. [ 2102.681784] env[63297]: DEBUG nova.compute.manager [req-555a638c-c5aa-4110-b8c2-f3db8b469069 req-16482d0a-76b0-436d-aff1-80637b2d993f service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Received event network-changed-41d0d073-3859-4e39-9e68-a1d33c810937 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2102.681784] env[63297]: DEBUG nova.compute.manager [req-555a638c-c5aa-4110-b8c2-f3db8b469069 req-16482d0a-76b0-436d-aff1-80637b2d993f service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Refreshing instance network info cache due to event network-changed-41d0d073-3859-4e39-9e68-a1d33c810937. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2102.681784] env[63297]: DEBUG oslo_concurrency.lockutils [req-555a638c-c5aa-4110-b8c2-f3db8b469069 req-16482d0a-76b0-436d-aff1-80637b2d993f service nova] Acquiring lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2102.682280] env[63297]: DEBUG oslo_concurrency.lockutils [req-555a638c-c5aa-4110-b8c2-f3db8b469069 req-16482d0a-76b0-436d-aff1-80637b2d993f service nova] Acquired lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2102.682280] env[63297]: DEBUG nova.network.neutron [req-555a638c-c5aa-4110-b8c2-f3db8b469069 req-16482d0a-76b0-436d-aff1-80637b2d993f service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Refreshing network info cache for port 41d0d073-3859-4e39-9e68-a1d33c810937 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2102.746583] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2c6a3c3f-d589-4f2a-8497-ccb95ec7b241 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "538ae7a3-6e53-48ed-9b79-169175806dc4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.174s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.397720] env[63297]: DEBUG nova.network.neutron [req-555a638c-c5aa-4110-b8c2-f3db8b469069 req-16482d0a-76b0-436d-aff1-80637b2d993f service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updated VIF entry in instance network info cache for port 41d0d073-3859-4e39-9e68-a1d33c810937. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2103.398093] env[63297]: DEBUG nova.network.neutron [req-555a638c-c5aa-4110-b8c2-f3db8b469069 req-16482d0a-76b0-436d-aff1-80637b2d993f service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance_info_cache with network_info: [{"id": "41d0d073-3859-4e39-9e68-a1d33c810937", "address": "fa:16:3e:81:23:7e", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d0d073-38", "ovs_interfaceid": "41d0d073-3859-4e39-9e68-a1d33c810937", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2103.901172] env[63297]: DEBUG oslo_concurrency.lockutils [req-555a638c-c5aa-4110-b8c2-f3db8b469069 req-16482d0a-76b0-436d-aff1-80637b2d993f service nova] Releasing lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2140.922713] env[63297]: DEBUG nova.compute.manager [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Stashing vm_state: active {{(pid=63297) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2141.441665] env[63297]: DEBUG oslo_concurrency.lockutils [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2141.441946] env[63297]: DEBUG oslo_concurrency.lockutils [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2141.948261] env[63297]: INFO nova.compute.claims [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2142.454529] env[63297]: INFO 
nova.compute.resource_tracker [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating resource usage from migration 65523337-f7d1-403b-88eb-30d7b7191e18 [ 2142.490844] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e88424-ca6e-4931-90e0-544c22a9bb38 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.498220] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d212e9af-8dca-49bd-b620-e2852bb83280 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.527263] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f757638c-ae44-4be3-ba04-b15d6c99c28e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.533819] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5299834-ef3d-410d-95b6-a97224192df3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.546281] env[63297]: DEBUG nova.compute.provider_tree [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2142.664984] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2143.048942] env[63297]: DEBUG nova.scheduler.client.report [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2143.554524] env[63297]: DEBUG oslo_concurrency.lockutils [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.112s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.554742] env[63297]: INFO nova.compute.manager [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Migrating [ 2143.665191] env[63297]: DEBUG oslo_service.periodic_task [None 
req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2143.665541] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.068488] env[63297]: DEBUG oslo_concurrency.lockutils [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2144.068880] env[63297]: DEBUG oslo_concurrency.lockutils [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2144.068880] env[63297]: DEBUG nova.network.neutron [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2144.666746] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.666912] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2144.771664] env[63297]: DEBUG nova.network.neutron [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance_info_cache with network_info: [{"id": "41d0d073-3859-4e39-9e68-a1d33c810937", "address": "fa:16:3e:81:23:7e", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d0d073-38", "ovs_interfaceid": "41d0d073-3859-4e39-9e68-a1d33c810937", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2145.275097] env[63297]: DEBUG oslo_concurrency.lockutils [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2146.661397] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2146.664988] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2146.665188] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2146.789243] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb80253-b217-40cf-bd69-ee6502806ebd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.806802] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance 
'538ae7a3-6e53-48ed-9b79-169175806dc4' progress to 0 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2147.313434] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2147.313737] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4ef12d3-114a-42ee-aa3e-b5643fbf666b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.320356] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2147.320356] env[63297]: value = "task-1698531" [ 2147.320356] env[63297]: _type = "Task" [ 2147.320356] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.327528] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698531, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.664924] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2147.830091] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698531, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.168229] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2148.168474] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2148.168627] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2148.168783] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2148.169697] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee9c260-cf50-4291-8476-db410364fac0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.177884] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c6ba8c-48a5-4391-b3e5-ad1f62c97f73 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.192959] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e66fa0-fc05-467a-a40a-4216182095b2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.198915] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350adc53-12f9-4963-bc43-aa54d4ccbc04 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.227262] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181323MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2148.227403] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2148.227622] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2148.329729] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698531, 'name': PowerOffVM_Task, 'duration_secs': 0.734172} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.329977] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2148.330179] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance '538ae7a3-6e53-48ed-9b79-169175806dc4' progress to 17 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2148.836244] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2148.836709] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2148.836709] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2148.836857] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2148.836947] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2148.837105] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2148.837316] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2148.837478] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2148.837646] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2148.837811] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2148.837984] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2148.843121] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e28359b4-998c-46a1-b9b6-d2c6c4fd0e79 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.859037] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2148.859037] env[63297]: value = "task-1698532" [ 2148.859037] env[63297]: _type = "Task" [ 2148.859037] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.866509] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698532, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.234974] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Applying migration context for instance 538ae7a3-6e53-48ed-9b79-169175806dc4 as it has an incoming, in-progress migration 65523337-f7d1-403b-88eb-30d7b7191e18. 
Migration status is migrating {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2149.235538] env[63297]: INFO nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating resource usage from migration 65523337-f7d1-403b-88eb-30d7b7191e18 [ 2149.253184] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Migration 65523337-f7d1-403b-88eb-30d7b7191e18 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2149.253356] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 538ae7a3-6e53-48ed-9b79-169175806dc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2149.253535] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2149.253676] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=960MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2149.289364] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36dfe16a-dabe-4e7f-a397-221bd6316971 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.297146] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001846fe-2a1a-4f51-a9a4-87d93fe17e12 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.327589] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35378412-e885-4e01-bd8c-b97f1ee84da4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.334493] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66eb6c7-ba1c-4b40-982c-65e1618cc4fe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.346920] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2149.366480] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698532, 'name': ReconfigVM_Task, 'duration_secs': 0.166557} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.366745] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance '538ae7a3-6e53-48ed-9b79-169175806dc4' progress to 33 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2149.849660] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2149.872537] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2149.872793] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2149.872967] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2149.873172] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2149.873320] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2149.873469] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2149.873676] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2149.873840] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2149.874038] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2149.874208] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2149.874380] env[63297]: DEBUG nova.virt.hardware [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2149.879879] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2149.880394] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bfa3b40-454b-4be2-8272-7dccb04c2b44 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.899208] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2149.899208] env[63297]: value = "task-1698533" [ 2149.899208] env[63297]: _type = "Task" [ 2149.899208] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.906647] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698533, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.355087] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2150.355290] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.128s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2150.408729] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698533, 'name': ReconfigVM_Task, 'duration_secs': 0.151095} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.408982] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2150.409729] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b05ecce-ce26-4ffd-b70a-3a11f0cd1dad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.430436] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 538ae7a3-6e53-48ed-9b79-169175806dc4/538ae7a3-6e53-48ed-9b79-169175806dc4.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2150.430657] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07bb980c-ad54-4fdf-868c-b69148d5d50c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.446900] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2150.446900] env[63297]: value = "task-1698534" [ 2150.446900] env[63297]: _type = "Task" [ 2150.446900] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.455146] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698534, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.956222] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698534, 'name': ReconfigVM_Task, 'duration_secs': 0.24668} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.956544] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 538ae7a3-6e53-48ed-9b79-169175806dc4/538ae7a3-6e53-48ed-9b79-169175806dc4.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2150.956749] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance '538ae7a3-6e53-48ed-9b79-169175806dc4' progress to 50 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2151.463499] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0dda93-e27a-422d-b1ba-625cb14e30a1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.482759] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364b4db6-3462-48e8-9cf5-7f6e2b8ca35f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.500362] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance '538ae7a3-6e53-48ed-9b79-169175806dc4' progress to 67 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2152.038996] env[63297]: DEBUG nova.network.neutron [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Port 41d0d073-3859-4e39-9e68-a1d33c810937 binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2153.065547] env[63297]: DEBUG oslo_concurrency.lockutils [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "538ae7a3-6e53-48ed-9b79-169175806dc4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2153.065897] env[63297]: DEBUG oslo_concurrency.lockutils [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "538ae7a3-6e53-48ed-9b79-169175806dc4-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2153.066022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "538ae7a3-6e53-48ed-9b79-169175806dc4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2153.356026] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2153.356204] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2153.356317] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2153.859988] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2153.860171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquired lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2153.860286] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Forcefully refreshing network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2153.860439] env[63297]: DEBUG nova.objects.instance [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lazy-loading 'info_cache' on Instance uuid 538ae7a3-6e53-48ed-9b79-169175806dc4 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2154.102276] env[63297]: DEBUG oslo_concurrency.lockutils [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2155.562467] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance_info_cache with network_info: [{"id": "41d0d073-3859-4e39-9e68-a1d33c810937", "address": "fa:16:3e:81:23:7e", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d0d073-38", "ovs_interfaceid": "41d0d073-3859-4e39-9e68-a1d33c810937", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2156.065026] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Releasing lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2156.065248] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updated the network info_cache for instance {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2156.065480] env[63297]: DEBUG oslo_concurrency.lockutils [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2156.065674] env[63297]: DEBUG nova.network.neutron [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2156.754801] env[63297]: DEBUG nova.network.neutron [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance_info_cache with network_info: [{"id": "41d0d073-3859-4e39-9e68-a1d33c810937", "address": "fa:16:3e:81:23:7e", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", 
"segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d0d073-38", "ovs_interfaceid": "41d0d073-3859-4e39-9e68-a1d33c810937", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2157.257359] env[63297]: DEBUG oslo_concurrency.lockutils [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2157.782950] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8eb03a8-cf42-4dfa-ae35-0b8c24dc60b9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.802961] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4785fe3-b90a-4b04-a2c4-8c75f97b85eb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.809878] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance '538ae7a3-6e53-48ed-9b79-169175806dc4' progress to 83 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2158.316756] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2158.317061] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed732c03-7776-4eea-820f-141a9fc257be {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.324401] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2158.324401] env[63297]: value = "task-1698535" [ 2158.324401] env[63297]: _type = "Task" [ 2158.324401] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.332882] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698535, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.836811] env[63297]: DEBUG oslo_vmware.api [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698535, 'name': PowerOnVM_Task, 'duration_secs': 0.387933} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2158.837162] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2158.837248] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-97136e22-5244-4538-b6de-c2e118cf4a6e tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance '538ae7a3-6e53-48ed-9b79-169175806dc4' progress to 100 {{(pid=63297) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2161.572012] env[63297]: DEBUG nova.network.neutron [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Port 41d0d073-3859-4e39-9e68-a1d33c810937 binding to destination host cpu-1 is already ACTIVE {{(pid=63297) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2161.572288] env[63297]: DEBUG oslo_concurrency.lockutils [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2161.572440] env[63297]: DEBUG oslo_concurrency.lockutils [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2161.572604] env[63297]: DEBUG nova.network.neutron [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2162.497620] env[63297]: DEBUG nova.network.neutron [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance_info_cache with network_info: [{"id": "41d0d073-3859-4e39-9e68-a1d33c810937", "address": "fa:16:3e:81:23:7e", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d0d073-38", "ovs_interfaceid": "41d0d073-3859-4e39-9e68-a1d33c810937", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2163.000548] env[63297]: DEBUG oslo_concurrency.lockutils [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2163.504507] env[63297]: DEBUG nova.compute.manager [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63297) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 2163.504741] env[63297]: DEBUG oslo_concurrency.lockutils [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2163.504976] env[63297]: DEBUG oslo_concurrency.lockutils [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2164.008654] env[63297]: DEBUG nova.objects.instance [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lazy-loading 'migration_context' on Instance uuid 538ae7a3-6e53-48ed-9b79-169175806dc4 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2164.552288] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4d5823-23e5-4b3f-a79c-88f3f70ffea3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.559489] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62db349f-5583-42ac-aa9a-014dd5505685 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.589066] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46283d36-7350-4b2f-8985-c457dbfc8a3e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.595647] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ae13ac-9578-410e-9333-34986baef4bd {{(pid=63297) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.607821] env[63297]: DEBUG nova.compute.provider_tree [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2165.111394] env[63297]: DEBUG nova.scheduler.client.report [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2166.123890] env[63297]: DEBUG oslo_concurrency.lockutils [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.619s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.658022] env[63297]: INFO nova.compute.manager [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Swapping old allocation on dict_keys(['88960333-a089-4255-ad72-5c02d57b2b35']) held by migration 65523337-f7d1-403b-88eb-30d7b7191e18 for instance [ 2167.678358] env[63297]: DEBUG nova.scheduler.client.report [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Overwriting current allocation {'allocations': {'88960333-a089-4255-ad72-5c02d57b2b35': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 189}}, 'project_id': '612e80df32dc4fb39e2fcf28c0c0a80b', 'user_id': 'acb339bdff424582a2aad0578cc255eb', 'consumer_generation': 1} on consumer 538ae7a3-6e53-48ed-9b79-169175806dc4 {{(pid=63297) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 2167.754644] env[63297]: DEBUG oslo_concurrency.lockutils [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2167.754862] env[63297]: DEBUG oslo_concurrency.lockutils [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2167.755055] env[63297]: DEBUG nova.network.neutron [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 
tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2168.461244] env[63297]: DEBUG nova.network.neutron [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance_info_cache with network_info: [{"id": "41d0d073-3859-4e39-9e68-a1d33c810937", "address": "fa:16:3e:81:23:7e", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d0d073-38", "ovs_interfaceid": "41d0d073-3859-4e39-9e68-a1d33c810937", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2168.964326] env[63297]: DEBUG oslo_concurrency.lockutils [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-538ae7a3-6e53-48ed-9b79-169175806dc4" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2168.964935] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2168.965304] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9190a9c6-adb5-4ead-a797-463611dbec83 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.972606] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2168.972606] env[63297]: value = "task-1698536" [ 2168.972606] env[63297]: _type = "Task" [ 2168.972606] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2168.983978] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698536, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.482461] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698536, 'name': PowerOffVM_Task, 'duration_secs': 0.200903} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2169.482726] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2169.483398] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2169.483615] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2169.483771] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2169.483949] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2169.484112] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2169.484291] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2169.484512] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2169.484674] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2169.484838] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2169.485010] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2169.485195] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2169.490005] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab6224f0-5940-4af6-96e4-c35df33c43af {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.505355] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2169.505355] env[63297]: value = "task-1698537" [ 2169.505355] env[63297]: _type = "Task" [ 2169.505355] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.512642] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698537, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.015189] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698537, 'name': ReconfigVM_Task, 'duration_secs': 0.142287} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.016031] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf79cbf-d68b-4c17-9660-db7a9ce129ec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.033597] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2170.033823] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2170.033982] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2170.034206] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2170.034358] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2170.034509] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2170.034706] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2170.034864] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2170.035041] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2170.035214] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2170.035413] env[63297]: DEBUG nova.virt.hardware [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2170.036184] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68a3925d-b49c-48df-b11d-fd3a86e66ff4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.041066] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2170.041066] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]520c8670-db02-f7b6-534e-201bd76be65b" [ 2170.041066] env[63297]: _type = "Task" [ 2170.041066] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.048450] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520c8670-db02-f7b6-534e-201bd76be65b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.551475] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]520c8670-db02-f7b6-534e-201bd76be65b, 'name': SearchDatastore_Task, 'duration_secs': 0.007115} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.556895] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2170.557198] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5be5ea52-41ef-4aa1-b1e3-cfb44d9dada6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.574685] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2170.574685] env[63297]: value = "task-1698538" [ 2170.574685] env[63297]: _type = "Task" [ 2170.574685] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.581962] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698538, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.084859] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698538, 'name': ReconfigVM_Task, 'duration_secs': 0.204469} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2171.084859] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=63297) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2171.085658] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c1a164f-954e-4828-966e-f7fb9ac82877 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.108482] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 538ae7a3-6e53-48ed-9b79-169175806dc4/538ae7a3-6e53-48ed-9b79-169175806dc4.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2171.108750] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-418fc6f8-d85e-4f4d-98dc-fc963358ca8f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.126168] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2171.126168] env[63297]: value = "task-1698539" [ 2171.126168] env[63297]: _type = "Task" [ 2171.126168] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.137604] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698539, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.636168] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698539, 'name': ReconfigVM_Task, 'duration_secs': 0.278036} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2171.636521] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 538ae7a3-6e53-48ed-9b79-169175806dc4/538ae7a3-6e53-48ed-9b79-169175806dc4.vmdk or device None with type thin {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2171.637395] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dd29fe-1aaa-4b89-8fc8-a6944f8ce65a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.655107] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7922fc4f-2f64-43ee-bb9f-e0e586949f50 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.671921] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cf6f28-5bd7-4f93-942f-bd58599bacdd {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.688445] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d488e17-f8aa-46f6-8f0e-b5a3b541fb61 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.694263] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2171.694471] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0983f66-dc71-4c1b-9dcd-4b8d1cb7fc5b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.699824] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2171.699824] env[63297]: value = "task-1698540" [ 2171.699824] env[63297]: _type = "Task" [ 2171.699824] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.706850] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698540, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.209364] env[63297]: DEBUG oslo_vmware.api [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698540, 'name': PowerOnVM_Task, 'duration_secs': 0.352368} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.209645] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2173.221315] env[63297]: INFO nova.compute.manager [None req-777b6655-cb5f-492c-b1bd-8fa37c36ea09 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance to original state: 'active' [ 2174.172087] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "538ae7a3-6e53-48ed-9b79-169175806dc4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.172430] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "538ae7a3-6e53-48ed-9b79-169175806dc4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2174.172657] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "538ae7a3-6e53-48ed-9b79-169175806dc4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.172859] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "538ae7a3-6e53-48ed-9b79-169175806dc4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2174.173060] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "538ae7a3-6e53-48ed-9b79-169175806dc4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2174.175373] env[63297]: INFO nova.compute.manager [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Terminating instance [ 2174.177625] env[63297]: DEBUG nova.compute.manager [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Start destroying 
the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2174.177625] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2174.178446] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b31570-308b-4e26-bcaf-a9db0a6f977c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.186979] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2174.187253] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91660579-790f-428b-8ddb-f3d6fc53c3c6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.193424] env[63297]: DEBUG oslo_vmware.api [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2174.193424] env[63297]: value = "task-1698541" [ 2174.193424] env[63297]: _type = "Task" [ 2174.193424] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.201060] env[63297]: DEBUG oslo_vmware.api [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698541, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.704143] env[63297]: DEBUG oslo_vmware.api [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698541, 'name': PowerOffVM_Task, 'duration_secs': 0.185527} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.704497] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2174.704585] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2174.704747] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-020c5198-99c0-45c1-b62b-0ba7bef60949 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.770454] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2174.770724] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2174.770874] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleting the datastore file [datastore1] 538ae7a3-6e53-48ed-9b79-169175806dc4 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2174.771146] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ac8d140-7795-4fda-bad4-094d5cf0fc6d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.777132] env[63297]: DEBUG oslo_vmware.api [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2174.777132] env[63297]: value = "task-1698543" [ 2174.777132] env[63297]: _type = "Task" [ 2174.777132] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.784688] env[63297]: DEBUG oslo_vmware.api [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698543, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.286914] env[63297]: DEBUG oslo_vmware.api [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698543, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150832} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.287190] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2175.287376] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2175.287547] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2175.287724] env[63297]: INFO nova.compute.manager [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2175.287958] env[63297]: DEBUG oslo.service.loopingcall [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2175.288159] env[63297]: DEBUG nova.compute.manager [-] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2175.288256] env[63297]: DEBUG nova.network.neutron [-] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2175.739413] env[63297]: DEBUG nova.compute.manager [req-fd5e430c-f4bb-4302-a851-f5c5af5a7cea req-bc904b9c-8bb8-4368-9f83-37f4a912c135 service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Received event network-vif-deleted-41d0d073-3859-4e39-9e68-a1d33c810937 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2175.739660] env[63297]: INFO nova.compute.manager [req-fd5e430c-f4bb-4302-a851-f5c5af5a7cea req-bc904b9c-8bb8-4368-9f83-37f4a912c135 service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Neutron deleted interface 41d0d073-3859-4e39-9e68-a1d33c810937; detaching it from the instance and deleting it from the info cache [ 2175.739987] env[63297]: DEBUG nova.network.neutron [req-fd5e430c-f4bb-4302-a851-f5c5af5a7cea req-bc904b9c-8bb8-4368-9f83-37f4a912c135 service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.187104] env[63297]: DEBUG nova.network.neutron [-] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.242848] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24c57ca9-2662-4c8d-a75a-a392f3c47140 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.252614] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92cc8714-b44d-4b7b-9d0a-c670086c20f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.277494] env[63297]: DEBUG nova.compute.manager [req-fd5e430c-f4bb-4302-a851-f5c5af5a7cea req-bc904b9c-8bb8-4368-9f83-37f4a912c135 service nova] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Detach interface failed, port_id=41d0d073-3859-4e39-9e68-a1d33c810937, reason: Instance 538ae7a3-6e53-48ed-9b79-169175806dc4 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2176.689612] env[63297]: INFO nova.compute.manager [-] [instance: 538ae7a3-6e53-48ed-9b79-169175806dc4] Took 1.40 seconds to deallocate network for instance. 
[ 2177.196730] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2177.197089] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2177.197203] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2177.218590] env[63297]: INFO nova.scheduler.client.report [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleted allocations for instance 538ae7a3-6e53-48ed-9b79-169175806dc4 [ 2177.726711] env[63297]: DEBUG oslo_concurrency.lockutils [None req-71c12b8f-a9e0-4ec3-807a-7e4c94a24c92 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "538ae7a3-6e53-48ed-9b79-169175806dc4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.554s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2178.970231] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "83e308e5-2da6-441f-a4df-64ef2d766670" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.970512] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "83e308e5-2da6-441f-a4df-64ef2d766670" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.473269] env[63297]: DEBUG nova.compute.manager [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Starting instance... 
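Note: the lockutils lines report, for every named lock, how long the caller waited to acquire it and how long it was held (0.000s for the quick resource-tracker update, 3.554s for the whole terminate). A rough stdlib equivalent of that instrumentation, a sketch rather than the oslo_concurrency implementation:

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock, name):
        # Report waited/held durations in the same shape as the log messages above.
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
        acquired_at = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - acquired_at
            print('Lock "%s" "released" :: held %.3fs' % (name, held))

    compute_resources = threading.Lock()
    with timed_lock(compute_resources, "compute_resources"):
        pass  # resource-tracker style bookkeeping would run here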
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2179.991276] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.991560] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.993121] env[63297]: INFO nova.compute.claims [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2181.029385] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805fe8c9-2c0a-4bce-8878-0e8c88d25758 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.036658] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd2db3e-4898-4c28-9105-3f4153a689c8 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.066566] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63291ac-9cde-44c4-a6a1-d3e0f7fa57f1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.072919] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47589bc2-8f58-481f-88f1-6959036f09d1 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.085222] env[63297]: DEBUG nova.compute.provider_tree [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2181.588635] env[63297]: DEBUG nova.scheduler.client.report [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2182.094687] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.103s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2182.095249] env[63297]: DEBUG nova.compute.manager [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2182.601309] env[63297]: DEBUG nova.compute.utils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2182.602765] env[63297]: DEBUG nova.compute.manager [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2182.602933] env[63297]: DEBUG nova.network.neutron [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2182.658387] env[63297]: DEBUG nova.policy [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'acb339bdff424582a2aad0578cc255eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '612e80df32dc4fb39e2fcf28c0c0a80b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 2182.927998] env[63297]: DEBUG nova.network.neutron [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Successfully created port: 090b393d-157b-4446-b526-af0688880009 {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2183.106691] env[63297]: DEBUG nova.compute.manager [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Start building block device mappings for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2184.117026] env[63297]: DEBUG nova.compute.manager [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Start spawning the instance on the hypervisor. 
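Note: the resource claim above succeeds against the inventory reported a few lines earlier. Roughly, usable capacity per resource class is (total - reserved) * allocation_ratio, so the m1.nano request of 1 VCPU, 192 MB RAM and 1 GB disk fits easily. A small worked check using the numbers from the log; the helper names are illustrative, and real placement additionally enforces min_unit/max_unit/step_size per allocation:

    INVENTORY = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def capacity(inventory):
        # Usable capacity per resource class: (total - reserved) * allocation_ratio.
        return {rc: int((v["total"] - v["reserved"]) * v["allocation_ratio"])
                for rc, v in inventory.items()}

    def claim_fits(request, used, inventory):
        cap = capacity(inventory)
        return all(used.get(rc, 0) + amount <= cap[rc] for rc, amount in request.items())

    request = {"VCPU": 1, "MEMORY_MB": 192, "DISK_GB": 1}   # m1.nano, per the flavor below
    print(capacity(INVENTORY))    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
    print(claim_fits(request, {}, INVENTORY))               # True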
{{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2184.142434] env[63297]: DEBUG nova.virt.hardware [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2184.142681] env[63297]: DEBUG nova.virt.hardware [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2184.142837] env[63297]: DEBUG nova.virt.hardware [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2184.143025] env[63297]: DEBUG nova.virt.hardware [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2184.143179] env[63297]: DEBUG nova.virt.hardware [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2184.143324] env[63297]: DEBUG nova.virt.hardware [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2184.143523] env[63297]: DEBUG nova.virt.hardware [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2184.143677] env[63297]: DEBUG nova.virt.hardware [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2184.143839] env[63297]: DEBUG nova.virt.hardware [None 
req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2184.144029] env[63297]: DEBUG nova.virt.hardware [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2184.144235] env[63297]: DEBUG nova.virt.hardware [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2184.145142] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f9ae9f-ddba-46d7-8a6f-5dfec3f72803 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.153214] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11560184-782d-47d5-9594-b3da398a020b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.300008] env[63297]: DEBUG nova.compute.manager [req-8f069e91-9228-40f8-87c4-0e51f7af9c39 req-cf00c656-40d3-4dfa-8bab-4dbc83f69aa6 service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Received event network-vif-plugged-090b393d-157b-4446-b526-af0688880009 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2184.300242] env[63297]: DEBUG oslo_concurrency.lockutils [req-8f069e91-9228-40f8-87c4-0e51f7af9c39 req-cf00c656-40d3-4dfa-8bab-4dbc83f69aa6 service nova] Acquiring lock "83e308e5-2da6-441f-a4df-64ef2d766670-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2184.300449] env[63297]: DEBUG oslo_concurrency.lockutils [req-8f069e91-9228-40f8-87c4-0e51f7af9c39 req-cf00c656-40d3-4dfa-8bab-4dbc83f69aa6 service nova] Lock "83e308e5-2da6-441f-a4df-64ef2d766670-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2184.300615] env[63297]: DEBUG oslo_concurrency.lockutils [req-8f069e91-9228-40f8-87c4-0e51f7af9c39 req-cf00c656-40d3-4dfa-8bab-4dbc83f69aa6 service nova] Lock "83e308e5-2da6-441f-a4df-64ef2d766670-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2184.300780] env[63297]: DEBUG nova.compute.manager [req-8f069e91-9228-40f8-87c4-0e51f7af9c39 req-cf00c656-40d3-4dfa-8bab-4dbc83f69aa6 service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] No waiting events found dispatching network-vif-plugged-090b393d-157b-4446-b526-af0688880009 {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2184.300940] env[63297]: WARNING nova.compute.manager [req-8f069e91-9228-40f8-87c4-0e51f7af9c39 
req-cf00c656-40d3-4dfa-8bab-4dbc83f69aa6 service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Received unexpected event network-vif-plugged-090b393d-157b-4446-b526-af0688880009 for instance with vm_state building and task_state spawning. [ 2184.384070] env[63297]: DEBUG nova.network.neutron [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Successfully updated port: 090b393d-157b-4446-b526-af0688880009 {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2184.886670] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.886837] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.887874] env[63297]: DEBUG nova.network.neutron [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2185.419445] env[63297]: DEBUG nova.network.neutron [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Instance cache missing network info. 
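Note: the nova.virt.hardware records above walk through CPU topology selection for a 1-vCPU flavor with no explicit limits: every sockets*cores*threads factorization of the vCPU count is enumerated, filtered against the maxima, and the single surviving topology here is 1:1:1. A condensed illustration of that enumeration, not Nova's actual implementation, which also honours preferences and NUMA constraints:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Yield (sockets, cores, threads) triples whose product is exactly vcpus
        # and which respect the per-dimension maxima.
        for sockets, cores, threads in product(range(1, min(vcpus, max_sockets) + 1),
                                               range(1, min(vcpus, max_cores) + 1),
                                               range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                yield (sockets, cores, threads)

    print(list(possible_topologies(1)))                   # [(1, 1, 1)], matching the log
    print(list(possible_topologies(4, max_threads=1)))    # [(1, 4, 1), (2, 2, 1), (4, 1, 1)]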
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2185.539970] env[63297]: DEBUG nova.network.neutron [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Updating instance_info_cache with network_info: [{"id": "090b393d-157b-4446-b526-af0688880009", "address": "fa:16:3e:2b:32:6c", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b393d-15", "ovs_interfaceid": "090b393d-157b-4446-b526-af0688880009", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2186.042229] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2186.042563] env[63297]: DEBUG nova.compute.manager [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Instance network_info: |[{"id": "090b393d-157b-4446-b526-af0688880009", "address": "fa:16:3e:2b:32:6c", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b393d-15", "ovs_interfaceid": "090b393d-157b-4446-b526-af0688880009", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63297) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2186.043012] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:32:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '090b393d-157b-4446-b526-af0688880009', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2186.050644] env[63297]: DEBUG oslo.service.loopingcall [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2186.050853] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2186.051103] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca124f06-a579-4909-886b-d315ddde6bef {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.071140] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2186.071140] env[63297]: value = "task-1698544" [ 2186.071140] env[63297]: _type = "Task" [ 2186.071140] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.079636] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698544, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.326048] env[63297]: DEBUG nova.compute.manager [req-c684bb60-0a45-4341-8913-bb5a9b505610 req-acae50b9-5aaa-4cf4-a788-df1ce086ceb8 service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Received event network-changed-090b393d-157b-4446-b526-af0688880009 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2186.326283] env[63297]: DEBUG nova.compute.manager [req-c684bb60-0a45-4341-8913-bb5a9b505610 req-acae50b9-5aaa-4cf4-a788-df1ce086ceb8 service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Refreshing instance network info cache due to event network-changed-090b393d-157b-4446-b526-af0688880009. 
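Note: the cached network_info above describes a /28 subnet with the gateway at 192.168.128.1, the DHCP server at 192.168.128.2 and the instance's fixed IP 192.168.128.13. Python's ipaddress module can sanity-check that kind of entry; a small sketch using the values from the log:

    import ipaddress

    subnet = ipaddress.ip_network("192.168.128.0/28")
    gateway = ipaddress.ip_address("192.168.128.1")
    dhcp = ipaddress.ip_address("192.168.128.2")
    fixed_ip = ipaddress.ip_address("192.168.128.13")

    assert gateway in subnet and dhcp in subnet and fixed_ip in subnet
    print(subnet.num_addresses)        # 16 addresses in a /28
    print(list(subnet.hosts())[-1])    # 192.168.128.14, the last usable host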
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2186.326516] env[63297]: DEBUG oslo_concurrency.lockutils [req-c684bb60-0a45-4341-8913-bb5a9b505610 req-acae50b9-5aaa-4cf4-a788-df1ce086ceb8 service nova] Acquiring lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2186.326747] env[63297]: DEBUG oslo_concurrency.lockutils [req-c684bb60-0a45-4341-8913-bb5a9b505610 req-acae50b9-5aaa-4cf4-a788-df1ce086ceb8 service nova] Acquired lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2186.326882] env[63297]: DEBUG nova.network.neutron [req-c684bb60-0a45-4341-8913-bb5a9b505610 req-acae50b9-5aaa-4cf4-a788-df1ce086ceb8 service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Refreshing network info cache for port 090b393d-157b-4446-b526-af0688880009 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2186.581375] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698544, 'name': CreateVM_Task, 'duration_secs': 0.290076} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.581682] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2186.582216] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2186.582392] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2186.582746] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2186.583009] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d866492c-59f9-415d-9dbb-cfa124abec07 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.587502] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2186.587502] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52241519-0112-22b7-69aa-05faf7ba6ab7" [ 2186.587502] env[63297]: _type = "Task" [ 2186.587502] env[63297]: } to complete. 
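Note: every vCenter call in this trace follows the same shape: invoke the API, get back a task handle, then poll it until it reports success, hence the repeated "Waiting for the task" / "progress is 0%" / "completed successfully" triplets. A generic stdlib sketch of that poll loop; the poll function and state names are placeholders, not the oslo.vmware wait_for_task API:

    import time

    def wait_for_task(poll, interval=0.5, timeout=60.0):
        # poll() returns a dict such as {"state": "running", "progress": 40},
        # {"state": "success", "result": ...} or {"state": "error", "message": ...}.
        deadline = time.monotonic() + timeout
        while True:
            status = poll()
            if status["state"] == "success":
                return status.get("result")
            if status["state"] == "error":
                raise RuntimeError(status.get("message", "task failed"))
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete in %.0fs" % timeout)
            print("progress is %s%%." % status.get("progress", 0))
            time.sleep(interval)

    # Example with a fake task that finishes on the third poll:
    states = iter([{"state": "running", "progress": 0},
                   {"state": "running", "progress": 50},
                   {"state": "success", "result": "task-1698544"}])
    print(wait_for_task(lambda: next(states), interval=0))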
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.594790] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52241519-0112-22b7-69aa-05faf7ba6ab7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.025476] env[63297]: DEBUG nova.network.neutron [req-c684bb60-0a45-4341-8913-bb5a9b505610 req-acae50b9-5aaa-4cf4-a788-df1ce086ceb8 service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Updated VIF entry in instance network info cache for port 090b393d-157b-4446-b526-af0688880009. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2187.025829] env[63297]: DEBUG nova.network.neutron [req-c684bb60-0a45-4341-8913-bb5a9b505610 req-acae50b9-5aaa-4cf4-a788-df1ce086ceb8 service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Updating instance_info_cache with network_info: [{"id": "090b393d-157b-4446-b526-af0688880009", "address": "fa:16:3e:2b:32:6c", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b393d-15", "ovs_interfaceid": "090b393d-157b-4446-b526-af0688880009", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2187.096505] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52241519-0112-22b7-69aa-05faf7ba6ab7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.528924] env[63297]: DEBUG oslo_concurrency.lockutils [req-c684bb60-0a45-4341-8913-bb5a9b505610 req-acae50b9-5aaa-4cf4-a788-df1ce086ceb8 service nova] Releasing lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2187.597903] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52241519-0112-22b7-69aa-05faf7ba6ab7, 'name': SearchDatastore_Task} progress is 0%. 
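Note: the lock names in these records scope concurrency per resource by embedding the protected thing in the name ("refresh_cache-83e308e5-...", "[datastore1] devstack-image-cache_base/41f1ad71-..."), so unrelated instances and datastore paths never contend. A minimal per-name lock registry in the same spirit, a sketch only; oslo_concurrency's lock factory also offers external file-based locks and semaphores:

    import threading
    from collections import defaultdict

    _locks = defaultdict(threading.Lock)   # lazily creates one Lock per name
    _registry_guard = threading.Lock()     # protects the registry itself

    def named_lock(name):
        with _registry_guard:
            return _locks[name]

    uuid = "83e308e5-2da6-441f-a4df-64ef2d766670"
    with named_lock("refresh_cache-%s" % uuid):
        pass  # refresh this instance's network info cache

    with named_lock("[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f"):
        pass  # work on the cached image without racing other workers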
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.099055] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52241519-0112-22b7-69aa-05faf7ba6ab7, 'name': SearchDatastore_Task, 'duration_secs': 1.348765} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.099055] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2188.099256] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2188.099507] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2188.099636] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2188.099818] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2188.100081] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2395b26f-d1db-4d02-b8bf-b8663b5bc1d2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.107999] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2188.108187] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Folder [datastore1] devstack-image-cache_base created. 
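Note: judging by the _create_folder_if_missing name in the record above, creating the image-cache folder is meant to be idempotent: the directory is requested and "already exists" is treated as success, so every spawn can go through the same "Creating directory ... Created directory" path whether or not an earlier spawn made it first. A local-filesystem analogue of that step; datastore paths like "[datastore1] devstack-image-cache_base" are just strings to the vCenter FileManager, and the helper below is illustrative:

    import os

    def create_folder_if_missing(path):
        # Idempotent directory creation: succeeding when the folder already
        # exists mirrors how the MakeDirectory call above is treated.
        try:
            os.makedirs(path)
            print("Created directory with path %s" % path)
        except FileExistsError:
            print("Folder %s already exists" % path)

    create_folder_if_missing("/tmp/devstack-image-cache_base")
    create_folder_if_missing("/tmp/devstack-image-cache_base")   # second call is a no-op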
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2188.108858] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81495a11-ff70-4436-bfaf-b8112aefbfec {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.113446] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2188.113446] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]522e0bf0-3b32-fec7-7bc0-ae1a528ad167" [ 2188.113446] env[63297]: _type = "Task" [ 2188.113446] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.121717] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522e0bf0-3b32-fec7-7bc0-ae1a528ad167, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.623310] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]522e0bf0-3b32-fec7-7bc0-ae1a528ad167, 'name': SearchDatastore_Task, 'duration_secs': 0.007873} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.624121] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2e37950-60b3-4868-b70b-17daf6fabb11 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.628822] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2188.628822] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d7124e-acc7-c79d-a948-c59342b9b3d8" [ 2188.628822] env[63297]: _type = "Task" [ 2188.628822] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.635636] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d7124e-acc7-c79d-a948-c59342b9b3d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.139329] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52d7124e-acc7-c79d-a948-c59342b9b3d8, 'name': SearchDatastore_Task, 'duration_secs': 0.008878} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.139574] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2189.139829] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 83e308e5-2da6-441f-a4df-64ef2d766670/83e308e5-2da6-441f-a4df-64ef2d766670.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2189.140089] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93828a32-fd6c-4f07-ab9a-d31081896a49 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.146786] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2189.146786] env[63297]: value = "task-1698545" [ 2189.146786] env[63297]: _type = "Task" [ 2189.146786] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2189.154029] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698545, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.656319] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698545, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.416966} completed successfully. 
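Note: the copy step above is the pay-off of the image cache: the Glance image only has to land in devstack-image-cache_base/<image-id>/ once, after which each new instance gets its root disk via a datastore-local CopyVirtualDisk from the cached VMDK to <instance-uuid>/<instance-uuid>.vmdk. A schematic of that path layout and decision; the names below are illustrative, and the real logic lives in nova.virt.vmwareapi.vmops._fetch_image_if_missing:

    def cached_image_path(datastore, image_id):
        return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)

    def instance_disk_path(datastore, instance_uuid):
        return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    def provision_root_disk(datastore, image_id, instance_uuid,
                            cache_has_image, fetch_image, copy_virtual_disk):
        # fetch_image/copy_virtual_disk stand in for the Glance download and the
        # CopyVirtualDisk_Task seen in the log.
        src = cached_image_path(datastore, image_id)
        if not cache_has_image(src):
            fetch_image(image_id, src)     # only on the first use of this image
        dst = instance_disk_path(datastore, instance_uuid)
        copy_virtual_disk(src, dst)
        return dst

    print(cached_image_path("datastore1", "41f1ad71-37f2-4e86-a900-da4965eba44f"))
    print(instance_disk_path("datastore1", "83e308e5-2da6-441f-a4df-64ef2d766670"))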
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.656682] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 83e308e5-2da6-441f-a4df-64ef2d766670/83e308e5-2da6-441f-a4df-64ef2d766670.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2189.656812] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2189.657073] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25adc009-e5ea-4234-9faa-51c654ec029f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.663761] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2189.663761] env[63297]: value = "task-1698546" [ 2189.663761] env[63297]: _type = "Task" [ 2189.663761] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2189.671913] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698546, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.173603] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698546, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058266} completed successfully. 
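Note: the target size 1048576 in the extend step is the flavor's 1 GB root disk expressed in KB, the unit the ExtendVirtualDisk call works in here: 1 GB * 1024 * 1024 = 1,048,576 KB. The one-line conversion, using the flavor value from the log:

    root_gb = 1                          # m1.nano root_gb from the flavor above
    size_kb = root_gb * 1024 * 1024      # disk size in KB for the extend call
    print(size_kb)                       # 1048576, matching the log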
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.173882] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2190.174677] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cbb238-ac47-402d-9459-a9586d0f88cb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.196100] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 83e308e5-2da6-441f-a4df-64ef2d766670/83e308e5-2da6-441f-a4df-64ef2d766670.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2190.196339] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-700eab88-4c76-4a84-862f-612752fa4f83 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.215467] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2190.215467] env[63297]: value = "task-1698547" [ 2190.215467] env[63297]: _type = "Task" [ 2190.215467] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.222435] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698547, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.725718] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698547, 'name': ReconfigVM_Task, 'duration_secs': 0.271127} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.726116] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 83e308e5-2da6-441f-a4df-64ef2d766670/83e308e5-2da6-441f-a4df-64ef2d766670.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2190.726714] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-acf42167-4434-4ea8-9802-b720ca605d05 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.732433] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2190.732433] env[63297]: value = "task-1698548" [ 2190.732433] env[63297]: _type = "Task" [ 2190.732433] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.740593] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698548, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.242549] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698548, 'name': Rename_Task, 'duration_secs': 0.127424} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.242807] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2191.243057] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55b50fc0-d603-4da5-9e52-d2db4f1fd57a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.250752] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2191.250752] env[63297]: value = "task-1698549" [ 2191.250752] env[63297]: _type = "Task" [ 2191.250752] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.259748] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698549, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.761217] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698549, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.262672] env[63297]: DEBUG oslo_vmware.api [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698549, 'name': PowerOnVM_Task, 'duration_secs': 0.534758} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2192.262939] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2192.263170] env[63297]: INFO nova.compute.manager [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Took 8.15 seconds to spawn the instance on the hypervisor. [ 2192.263355] env[63297]: DEBUG nova.compute.manager [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2192.264125] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425be1c6-90d2-4864-a636-c41b5f9f87f3 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.780496] env[63297]: INFO nova.compute.manager [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Took 12.80 seconds to build instance. [ 2193.283261] env[63297]: DEBUG oslo_concurrency.lockutils [None req-133a0e37-01b5-470b-aad1-b7c02e363ac7 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "83e308e5-2da6-441f-a4df-64ef2d766670" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.313s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.359484] env[63297]: DEBUG nova.compute.manager [req-122b46ce-c2d9-4477-ac5f-e43bc002667f req-a529627b-32ba-4e9f-9c39-a594a296a27f service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Received event network-changed-090b393d-157b-4446-b526-af0688880009 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2193.359697] env[63297]: DEBUG nova.compute.manager [req-122b46ce-c2d9-4477-ac5f-e43bc002667f req-a529627b-32ba-4e9f-9c39-a594a296a27f service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Refreshing instance network info cache due to event network-changed-090b393d-157b-4446-b526-af0688880009. 
{{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2193.359915] env[63297]: DEBUG oslo_concurrency.lockutils [req-122b46ce-c2d9-4477-ac5f-e43bc002667f req-a529627b-32ba-4e9f-9c39-a594a296a27f service nova] Acquiring lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2193.360887] env[63297]: DEBUG oslo_concurrency.lockutils [req-122b46ce-c2d9-4477-ac5f-e43bc002667f req-a529627b-32ba-4e9f-9c39-a594a296a27f service nova] Acquired lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2193.361150] env[63297]: DEBUG nova.network.neutron [req-122b46ce-c2d9-4477-ac5f-e43bc002667f req-a529627b-32ba-4e9f-9c39-a594a296a27f service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Refreshing network info cache for port 090b393d-157b-4446-b526-af0688880009 {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2194.069842] env[63297]: DEBUG nova.network.neutron [req-122b46ce-c2d9-4477-ac5f-e43bc002667f req-a529627b-32ba-4e9f-9c39-a594a296a27f service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Updated VIF entry in instance network info cache for port 090b393d-157b-4446-b526-af0688880009. {{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2194.070241] env[63297]: DEBUG nova.network.neutron [req-122b46ce-c2d9-4477-ac5f-e43bc002667f req-a529627b-32ba-4e9f-9c39-a594a296a27f service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Updating instance_info_cache with network_info: [{"id": "090b393d-157b-4446-b526-af0688880009", "address": "fa:16:3e:2b:32:6c", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b393d-15", "ovs_interfaceid": "090b393d-157b-4446-b526-af0688880009", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2194.572924] env[63297]: DEBUG oslo_concurrency.lockutils [req-122b46ce-c2d9-4477-ac5f-e43bc002667f req-a529627b-32ba-4e9f-9c39-a594a296a27f service nova] Releasing lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2203.665465] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task 
ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.665816] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2204.665593] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2204.665931] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2205.666165] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2207.661062] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2208.167620] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2208.167620] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2209.166331] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2209.665412] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.169081] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2210.169391] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2210.169497] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2210.169653] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2210.170585] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6062e2e-a0e9-44fd-9d65-64fbd96aaed4 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.179261] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe685c96-3f5d-4565-9f91-43749de55eeb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.193489] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc6007f-5cae-455b-b708-026d70ef5b87 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.199356] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71087bf-ad94-459d-a9ca-1a5347edac8a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.227324] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181219MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2210.227464] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2210.227647] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2211.251805] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Instance 83e308e5-2da6-441f-a4df-64ef2d766670 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63297) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2211.252077] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2211.252173] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2211.276807] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860811d6-19be-4c91-bb82-8e7e072e892d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.283903] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5b1957-6bfb-4609-bcc4-9e3d4ba95076 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.313731] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348bddc3-a7d0-47c5-b2f4-51b0f740eb6b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.320762] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b8a1a0-05ad-4f9f-a651-2c8a9c36d98e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.333595] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2211.837045] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2212.342367] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2212.342741] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.343716] env[63297]: DEBUG oslo_service.periodic_task [None 
req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2214.344104] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2214.344238] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2214.875767] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2214.875917] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquired lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2214.876079] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Forcefully refreshing network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2214.876244] env[63297]: DEBUG nova.objects.instance [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lazy-loading 'info_cache' on Instance uuid 83e308e5-2da6-441f-a4df-64ef2d766670 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2216.581058] env[63297]: DEBUG nova.network.neutron [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Updating instance_info_cache with network_info: [{"id": "090b393d-157b-4446-b526-af0688880009", "address": "fa:16:3e:2b:32:6c", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b393d-15", "ovs_interfaceid": "090b393d-157b-4446-b526-af0688880009", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2217.084955] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Releasing lock 
"refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2217.085125] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Updated the network info_cache for instance {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2228.652263] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4571c6e5-4815-4b12-aa21-d1b2c9ce5424 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "83e308e5-2da6-441f-a4df-64ef2d766670" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2228.652632] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4571c6e5-4815-4b12-aa21-d1b2c9ce5424 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "83e308e5-2da6-441f-a4df-64ef2d766670" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2228.652722] env[63297]: DEBUG nova.compute.manager [None req-4571c6e5-4815-4b12-aa21-d1b2c9ce5424 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2228.653785] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6183f0-9b93-456c-a752-b12bb8276880 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.661187] env[63297]: DEBUG nova.compute.manager [None req-4571c6e5-4815-4b12-aa21-d1b2c9ce5424 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63297) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2228.661784] env[63297]: DEBUG nova.objects.instance [None req-4571c6e5-4815-4b12-aa21-d1b2c9ce5424 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lazy-loading 'flavor' on Instance uuid 83e308e5-2da6-441f-a4df-64ef2d766670 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2229.167853] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4571c6e5-4815-4b12-aa21-d1b2c9ce5424 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2229.168134] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4afa8440-a686-41ca-9f2d-b171df040f21 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.175658] env[63297]: DEBUG oslo_vmware.api [None req-4571c6e5-4815-4b12-aa21-d1b2c9ce5424 tempest-ServerActionsTestJSON-70180264 
tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2229.175658] env[63297]: value = "task-1698550" [ 2229.175658] env[63297]: _type = "Task" [ 2229.175658] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.183732] env[63297]: DEBUG oslo_vmware.api [None req-4571c6e5-4815-4b12-aa21-d1b2c9ce5424 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698550, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.685280] env[63297]: DEBUG oslo_vmware.api [None req-4571c6e5-4815-4b12-aa21-d1b2c9ce5424 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698550, 'name': PowerOffVM_Task, 'duration_secs': 0.169079} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2229.685644] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-4571c6e5-4815-4b12-aa21-d1b2c9ce5424 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2229.685690] env[63297]: DEBUG nova.compute.manager [None req-4571c6e5-4815-4b12-aa21-d1b2c9ce5424 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2229.686488] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce97a500-f735-480a-bf73-888d4a74ab14 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.198074] env[63297]: DEBUG oslo_concurrency.lockutils [None req-4571c6e5-4815-4b12-aa21-d1b2c9ce5424 tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "83e308e5-2da6-441f-a4df-64ef2d766670" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.545s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.041893] env[63297]: DEBUG nova.objects.instance [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lazy-loading 'flavor' on Instance uuid 83e308e5-2da6-441f-a4df-64ef2d766670 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2231.546779] env[63297]: DEBUG oslo_concurrency.lockutils [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2231.546923] env[63297]: DEBUG oslo_concurrency.lockutils [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2231.547097] env[63297]: DEBUG nova.network.neutron [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2231.547273] env[63297]: DEBUG nova.objects.instance [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lazy-loading 'info_cache' on Instance uuid 83e308e5-2da6-441f-a4df-64ef2d766670 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2232.051174] env[63297]: DEBUG nova.objects.base [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Object Instance<83e308e5-2da6-441f-a4df-64ef2d766670> lazy-loaded attributes: flavor,info_cache {{(pid=63297) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2232.758613] env[63297]: DEBUG nova.network.neutron [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Updating instance_info_cache with network_info: [{"id": "090b393d-157b-4446-b526-af0688880009", "address": "fa:16:3e:2b:32:6c", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b393d-15", "ovs_interfaceid": "090b393d-157b-4446-b526-af0688880009", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2233.261980] env[63297]: DEBUG oslo_concurrency.lockutils [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2233.766454] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2233.766831] 
env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0cdf1152-1c43-436a-9aea-a4047fbb6b0c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.774751] env[63297]: DEBUG oslo_vmware.api [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2233.774751] env[63297]: value = "task-1698551" [ 2233.774751] env[63297]: _type = "Task" [ 2233.774751] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2233.783244] env[63297]: DEBUG oslo_vmware.api [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698551, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2234.284524] env[63297]: DEBUG oslo_vmware.api [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698551, 'name': PowerOnVM_Task, 'duration_secs': 0.348493} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2234.284883] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2234.285014] env[63297]: DEBUG nova.compute.manager [None req-19d87866-2bf0-4cd2-8f99-7e1bdf6f558b tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2234.285691] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b827eb-6b6f-482c-a4f8-0c78d3481418 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.981932] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aed5bda-8329-45d7-81cc-8ad90b34931c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.988632] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7f5b86ae-c7bb-4936-8408-8b38b8deac7f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Suspending the VM {{(pid=63297) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2235.988869] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ae1dd7bf-ae47-4e39-95d4-ed5625a2250e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.995355] env[63297]: DEBUG oslo_vmware.api [None req-7f5b86ae-c7bb-4936-8408-8b38b8deac7f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2235.995355] env[63297]: 
value = "task-1698552" [ 2235.995355] env[63297]: _type = "Task" [ 2235.995355] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2236.002647] env[63297]: DEBUG oslo_vmware.api [None req-7f5b86ae-c7bb-4936-8408-8b38b8deac7f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698552, 'name': SuspendVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.505961] env[63297]: DEBUG oslo_vmware.api [None req-7f5b86ae-c7bb-4936-8408-8b38b8deac7f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698552, 'name': SuspendVM_Task} progress is 70%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.006232] env[63297]: DEBUG oslo_vmware.api [None req-7f5b86ae-c7bb-4936-8408-8b38b8deac7f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698552, 'name': SuspendVM_Task, 'duration_secs': 0.554208} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2237.006576] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-7f5b86ae-c7bb-4936-8408-8b38b8deac7f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Suspended the VM {{(pid=63297) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2237.006738] env[63297]: DEBUG nova.compute.manager [None req-7f5b86ae-c7bb-4936-8408-8b38b8deac7f tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2237.007550] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61dc1eb4-1e59-4290-877d-2b4cdd745932 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.303191] env[63297]: INFO nova.compute.manager [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Resuming [ 2238.303863] env[63297]: DEBUG nova.objects.instance [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lazy-loading 'flavor' on Instance uuid 83e308e5-2da6-441f-a4df-64ef2d766670 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2239.312133] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2239.312472] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquired lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2239.312472] env[63297]: DEBUG nova.network.neutron [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2240.011233] env[63297]: DEBUG nova.network.neutron [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Updating instance_info_cache with network_info: [{"id": "090b393d-157b-4446-b526-af0688880009", "address": "fa:16:3e:2b:32:6c", "network": {"id": "d408bb0f-d7aa-44e0-aedf-d1ce0ad4c0c4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-67740740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "612e80df32dc4fb39e2fcf28c0c0a80b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b393d-15", "ovs_interfaceid": "090b393d-157b-4446-b526-af0688880009", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2240.513967] env[63297]: DEBUG oslo_concurrency.lockutils [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Releasing lock "refresh_cache-83e308e5-2da6-441f-a4df-64ef2d766670" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2240.515458] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e43c657-386a-4690-9568-e2153f2d7f4f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.521902] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Resuming the VM {{(pid=63297) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2240.522136] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eed70355-a0f7-499d-b354-114b45dd7225 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.530176] env[63297]: DEBUG oslo_vmware.api [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the 
task: (returnval){ [ 2240.530176] env[63297]: value = "task-1698553" [ 2240.530176] env[63297]: _type = "Task" [ 2240.530176] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2240.537410] env[63297]: DEBUG oslo_vmware.api [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698553, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.043768] env[63297]: DEBUG oslo_vmware.api [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698553, 'name': PowerOnVM_Task, 'duration_secs': 0.475659} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2241.044184] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Resumed the VM {{(pid=63297) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2241.044517] env[63297]: DEBUG nova.compute.manager [None req-2ba61d58-f7fe-48c0-b31c-568d6cc6336d tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2241.045747] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8405ce57-0e05-458b-bab3-56029e09285c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.897544] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "83e308e5-2da6-441f-a4df-64ef2d766670" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2241.897899] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "83e308e5-2da6-441f-a4df-64ef2d766670" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2241.898022] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "83e308e5-2da6-441f-a4df-64ef2d766670-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2241.898208] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock 
"83e308e5-2da6-441f-a4df-64ef2d766670-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2241.898380] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "83e308e5-2da6-441f-a4df-64ef2d766670-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2241.900541] env[63297]: INFO nova.compute.manager [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Terminating instance [ 2241.902539] env[63297]: DEBUG nova.compute.manager [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2241.902739] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2241.903666] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19efeaee-3576-4c68-9ebf-998fb1f2fd68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.911382] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2241.911602] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a771730-f25e-4b77-83a0-e83dfb7956db {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.917523] env[63297]: DEBUG oslo_vmware.api [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2241.917523] env[63297]: value = "task-1698554" [ 2241.917523] env[63297]: _type = "Task" [ 2241.917523] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2241.925517] env[63297]: DEBUG oslo_vmware.api [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698554, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.427712] env[63297]: DEBUG oslo_vmware.api [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698554, 'name': PowerOffVM_Task, 'duration_secs': 0.188557} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2242.427949] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2242.428136] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2242.428383] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a219c3c-b2cf-4e35-98a3-d18df37995aa {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.493686] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2242.493970] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2242.494288] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleting the datastore file [datastore1] 83e308e5-2da6-441f-a4df-64ef2d766670 {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2242.494614] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ba716f5-91da-4a46-b731-dd2cef018e73 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.501509] env[63297]: DEBUG oslo_vmware.api [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for the task: (returnval){ [ 2242.501509] env[63297]: value = "task-1698556" [ 2242.501509] env[63297]: _type = "Task" [ 2242.501509] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2242.511886] env[63297]: DEBUG oslo_vmware.api [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698556, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.010979] env[63297]: DEBUG oslo_vmware.api [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Task: {'id': task-1698556, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152287} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2243.011506] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2243.011506] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2243.011714] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2243.011928] env[63297]: INFO nova.compute.manager [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2243.012315] env[63297]: DEBUG oslo.service.loopingcall [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2243.012602] env[63297]: DEBUG nova.compute.manager [-] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2243.012732] env[63297]: DEBUG nova.network.neutron [-] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2243.423182] env[63297]: DEBUG nova.compute.manager [req-12f2f784-aec3-4e71-9653-8367c008a8c3 req-c40b359f-758c-4cb8-bcd6-36c727fb8742 service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Received event network-vif-deleted-090b393d-157b-4446-b526-af0688880009 {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2243.423182] env[63297]: INFO nova.compute.manager [req-12f2f784-aec3-4e71-9653-8367c008a8c3 req-c40b359f-758c-4cb8-bcd6-36c727fb8742 service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Neutron deleted interface 090b393d-157b-4446-b526-af0688880009; detaching it from the instance and deleting it from the info cache [ 2243.423182] env[63297]: DEBUG nova.network.neutron [req-12f2f784-aec3-4e71-9653-8367c008a8c3 req-c40b359f-758c-4cb8-bcd6-36c727fb8742 service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2243.902856] env[63297]: DEBUG nova.network.neutron [-] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2243.925035] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d48c8285-8184-4d51-87d1-d633f6fa93a6 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.935016] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf50c221-27b5-45b7-903e-8ce82240b6bb {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.958725] env[63297]: DEBUG nova.compute.manager [req-12f2f784-aec3-4e71-9653-8367c008a8c3 req-c40b359f-758c-4cb8-bcd6-36c727fb8742 service nova] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Detach interface failed, port_id=090b393d-157b-4446-b526-af0688880009, reason: Instance 83e308e5-2da6-441f-a4df-64ef2d766670 could not be found. {{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2244.404911] env[63297]: INFO nova.compute.manager [-] [instance: 83e308e5-2da6-441f-a4df-64ef2d766670] Took 1.39 seconds to deallocate network for instance. 
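
The terminate sequence above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, each followed by "Waiting for the task" / "progress is N%" polling) follows the standard oslo.vmware pattern of invoking a vCenter *_Task API and then blocking on the task moref. The snippet below is a minimal illustrative sketch of that pattern, not Nova's actual vmops/vm_util code; the host, credentials, and `vm_ref` managed-object reference are hypothetical placeholders.

```python
# Illustrative sketch (not Nova's code) of the "invoke a *_Task API, then
# poll until completion" pattern seen in the PowerOffVM_Task / UnregisterVM /
# DeleteDatastoreFile_Task entries above. Connection values and vm_ref are
# placeholders; real deployments read them from nova.conf ([vmware] section).
from oslo_vmware import api as vmware_api


def power_off_and_unregister(session, vm_ref):
    """Power off a VM and unregister it, waiting on the vCenter task."""
    # PowerOffVM_Task returns a task moref; wait_for_task polls it (the
    # "Task: {'id': task-..., 'name': PowerOffVM_Task} progress is N%" lines
    # in the log) and raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is a synchronous call: no task object comes back, which is
    # why the log shows no progress polling for it.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)


if __name__ == '__main__':
    # Looking up vm_ref (e.g. via SearchIndex.FindAllByUuid) is omitted here.
    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
```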
[ 2244.912526] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2244.912807] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2244.913038] env[63297]: DEBUG nova.objects.instance [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lazy-loading 'resources' on Instance uuid 83e308e5-2da6-441f-a4df-64ef2d766670 {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2245.447972] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16c0dc6-113c-4eb6-8789-8079ee4a3394 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.455772] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc814e22-9012-4c00-a9ab-d2508e26d246 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.486589] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cb1635-195f-4d80-a965-754174089ffe {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.493447] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b8a1b7-4b1f-43c6-a288-5b90840afa3f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.506322] env[63297]: DEBUG nova.compute.provider_tree [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2246.026314] env[63297]: ERROR nova.scheduler.client.report [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] [req-8f3f4e3a-205d-4c06-b18c-f7f8abb295f7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8f3f4e3a-205d-4c06-b18c-f7f8abb295f7"}]} [ 2246.041786] env[63297]: DEBUG nova.scheduler.client.report [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2246.054955] env[63297]: DEBUG nova.scheduler.client.report [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2246.055197] env[63297]: DEBUG nova.compute.provider_tree [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2246.065587] env[63297]: DEBUG nova.scheduler.client.report [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2246.082612] env[63297]: DEBUG nova.scheduler.client.report [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2246.105028] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd079781-9fa5-4867-bf8a-484f4246089c {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.111757] env[63297]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1593a333-1600-466d-a05e-e9bd4b7f2969 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.140642] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f726dd-6f95-4e8d-aa91-921ff77ee1df {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.147125] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb4dc58-ff9c-4af1-9a47-de98d6a34c1f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.160950] env[63297]: DEBUG nova.compute.provider_tree [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2246.689889] env[63297]: DEBUG nova.scheduler.client.report [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 191 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2246.690184] env[63297]: DEBUG nova.compute.provider_tree [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 191 to 192 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2246.690338] env[63297]: DEBUG nova.compute.provider_tree [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2247.195118] env[63297]: DEBUG oslo_concurrency.lockutils [None 
req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.282s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2247.215668] env[63297]: INFO nova.scheduler.client.report [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Deleted allocations for instance 83e308e5-2da6-441f-a4df-64ef2d766670 [ 2247.723391] env[63297]: DEBUG oslo_concurrency.lockutils [None req-f51e5b8d-4c1b-4ec6-b58c-289693456edd tempest-ServerActionsTestJSON-70180264 tempest-ServerActionsTestJSON-70180264-project-member] Lock "83e308e5-2da6-441f-a4df-64ef2d766670" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.825s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2252.880658] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Acquiring lock "20007e28-079a-40a1-bd1f-eafd6a346dfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2252.880658] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Lock "20007e28-079a-40a1-bd1f-eafd6a346dfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2253.383515] env[63297]: DEBUG nova.compute.manager [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Starting instance... 
{{(pid=63297) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2253.907898] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2253.908173] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2253.909800] env[63297]: INFO nova.compute.claims [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2254.947043] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b2b520-6571-42d7-ba60-38378dd397c2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.954956] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c69b039-03ce-464d-98db-0dcb33d1b153 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.983698] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6ae857-9c54-4d6c-af90-2db76952cf66 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.990298] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4b22e5-d64d-4af5-bb6b-7dc5165b0452 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.002456] env[63297]: DEBUG nova.compute.provider_tree [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2255.505356] env[63297]: DEBUG nova.scheduler.client.report [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2256.010729] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.102s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.011258] env[63297]: DEBUG nova.compute.manager [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Start building networks asynchronously for instance. {{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2256.517089] env[63297]: DEBUG nova.compute.utils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Using /dev/sd instead of None {{(pid=63297) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2256.518604] env[63297]: DEBUG nova.compute.manager [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Allocating IP information in the background. {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2256.518784] env[63297]: DEBUG nova.network.neutron [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] allocate_for_instance() {{(pid=63297) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2256.557704] env[63297]: DEBUG nova.policy [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22f567b487024e4ba1a14e0b965a4f3a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '113bcd2c3af9493caddf933976a80add', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63297) authorize /opt/stack/nova/nova/policy.py:201}} [ 2256.785644] env[63297]: DEBUG nova.network.neutron [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Successfully created port: 8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d {{(pid=63297) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2257.022995] env[63297]: DEBUG nova.compute.manager [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Start building block device mappings for instance. 
{{(pid=63297) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2258.033040] env[63297]: DEBUG nova.compute.manager [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Start spawning the instance on the hypervisor. {{(pid=63297) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2258.060840] env[63297]: DEBUG nova.virt.hardware [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-10T17:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-10T17:09:25Z,direct_url=,disk_format='vmdk',id=41f1ad71-37f2-4e86-a900-da4965eba44f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='766dfc413a924229a87b04aa69e3d966',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-10T17:09:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2258.061152] env[63297]: DEBUG nova.virt.hardware [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Flavor limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2258.061338] env[63297]: DEBUG nova.virt.hardware [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Image limits 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2258.061526] env[63297]: DEBUG nova.virt.hardware [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Flavor pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2258.061678] env[63297]: DEBUG nova.virt.hardware [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Image pref 0:0:0 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2258.061821] env[63297]: DEBUG nova.virt.hardware [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63297) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2258.062038] env[63297]: DEBUG nova.virt.hardware [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2258.062234] env[63297]: DEBUG nova.virt.hardware [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2258.062382] env[63297]: DEBUG nova.virt.hardware [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Got 1 possible topologies {{(pid=63297) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2258.062546] env[63297]: DEBUG nova.virt.hardware [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2258.062719] env[63297]: DEBUG nova.virt.hardware [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63297) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2258.063674] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfad62f-e8a0-4926-b477-747ff21838d0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.071611] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c411ee3-be98-48ba-953a-1e9b8d34a726 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.177149] env[63297]: DEBUG nova.compute.manager [req-4e08e5bc-584e-4acb-8aeb-fa8b68981b12 req-78dddcfd-f559-4785-ac6f-2e55d0a9335f service nova] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Received event network-vif-plugged-8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2258.177330] env[63297]: DEBUG oslo_concurrency.lockutils [req-4e08e5bc-584e-4acb-8aeb-fa8b68981b12 req-78dddcfd-f559-4785-ac6f-2e55d0a9335f service nova] Acquiring lock "20007e28-079a-40a1-bd1f-eafd6a346dfa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2258.177535] env[63297]: DEBUG oslo_concurrency.lockutils [req-4e08e5bc-584e-4acb-8aeb-fa8b68981b12 req-78dddcfd-f559-4785-ac6f-2e55d0a9335f service nova] Lock "20007e28-079a-40a1-bd1f-eafd6a346dfa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2258.177693] env[63297]: DEBUG oslo_concurrency.lockutils [req-4e08e5bc-584e-4acb-8aeb-fa8b68981b12 req-78dddcfd-f559-4785-ac6f-2e55d0a9335f service nova] Lock "20007e28-079a-40a1-bd1f-eafd6a346dfa-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2258.177851] env[63297]: DEBUG nova.compute.manager [req-4e08e5bc-584e-4acb-8aeb-fa8b68981b12 req-78dddcfd-f559-4785-ac6f-2e55d0a9335f service nova] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] No waiting events found dispatching network-vif-plugged-8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d {{(pid=63297) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2258.178015] env[63297]: WARNING nova.compute.manager [req-4e08e5bc-584e-4acb-8aeb-fa8b68981b12 req-78dddcfd-f559-4785-ac6f-2e55d0a9335f service nova] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Received unexpected event network-vif-plugged-8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d for instance with vm_state building and task_state spawning. [ 2258.260035] env[63297]: DEBUG nova.network.neutron [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Successfully updated port: 8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d {{(pid=63297) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2258.762227] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Acquiring lock "refresh_cache-20007e28-079a-40a1-bd1f-eafd6a346dfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2258.762383] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Acquired lock "refresh_cache-20007e28-079a-40a1-bd1f-eafd6a346dfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2258.762535] env[63297]: DEBUG nova.network.neutron [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Building network info cache for instance {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2259.293247] env[63297]: DEBUG nova.network.neutron [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Instance cache missing network info. 
{{(pid=63297) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2259.416604] env[63297]: DEBUG nova.network.neutron [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Updating instance_info_cache with network_info: [{"id": "8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d", "address": "fa:16:3e:1a:62:40", "network": {"id": "9bdf7e1e-da1b-4556-a1cb-850451737586", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1112129375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "113bcd2c3af9493caddf933976a80add", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a8cae45-0e", "ovs_interfaceid": "8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2259.920063] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Releasing lock "refresh_cache-20007e28-079a-40a1-bd1f-eafd6a346dfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2259.920063] env[63297]: DEBUG nova.compute.manager [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Instance network_info: |[{"id": "8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d", "address": "fa:16:3e:1a:62:40", "network": {"id": "9bdf7e1e-da1b-4556-a1cb-850451737586", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1112129375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "113bcd2c3af9493caddf933976a80add", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a8cae45-0e", "ovs_interfaceid": "8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=63297) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2259.920063] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:62:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '26472e27-9835-4f87-ab7f-ca24dfee4e83', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d', 'vif_model': 'vmxnet3'}] {{(pid=63297) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2259.927899] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Creating folder: Project (113bcd2c3af9493caddf933976a80add). Parent ref: group-v353718. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2259.928184] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7b30f3d-7040-4a1b-a165-f307fbc7e31a {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.939789] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Created folder: Project (113bcd2c3af9493caddf933976a80add) in parent group-v353718. [ 2259.939967] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Creating folder: Instances. Parent ref: group-v354054. {{(pid=63297) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2259.940208] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93906c00-9ef4-4d40-923b-e3ed99eb2f11 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.950314] env[63297]: INFO nova.virt.vmwareapi.vm_util [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Created folder: Instances in parent group-v354054. [ 2259.950530] env[63297]: DEBUG oslo.service.loopingcall [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2259.950706] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Creating VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2259.950888] env[63297]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cbb75e4-52bd-407b-bb61-e5d421503497 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.968786] env[63297]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2259.968786] env[63297]: value = "task-1698559" [ 2259.968786] env[63297]: _type = "Task" [ 2259.968786] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2259.975974] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698559, 'name': CreateVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.200713] env[63297]: DEBUG nova.compute.manager [req-dbf9f760-d595-4aff-9527-720848a7b439 req-4620bde6-4ab3-4f87-8c51-ccd445e2e2b0 service nova] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Received event network-changed-8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2260.200929] env[63297]: DEBUG nova.compute.manager [req-dbf9f760-d595-4aff-9527-720848a7b439 req-4620bde6-4ab3-4f87-8c51-ccd445e2e2b0 service nova] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Refreshing instance network info cache due to event network-changed-8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d. {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2260.201168] env[63297]: DEBUG oslo_concurrency.lockutils [req-dbf9f760-d595-4aff-9527-720848a7b439 req-4620bde6-4ab3-4f87-8c51-ccd445e2e2b0 service nova] Acquiring lock "refresh_cache-20007e28-079a-40a1-bd1f-eafd6a346dfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2260.201315] env[63297]: DEBUG oslo_concurrency.lockutils [req-dbf9f760-d595-4aff-9527-720848a7b439 req-4620bde6-4ab3-4f87-8c51-ccd445e2e2b0 service nova] Acquired lock "refresh_cache-20007e28-079a-40a1-bd1f-eafd6a346dfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2260.201476] env[63297]: DEBUG nova.network.neutron [req-dbf9f760-d595-4aff-9527-720848a7b439 req-4620bde6-4ab3-4f87-8c51-ccd445e2e2b0 service nova] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Refreshing network info cache for port 8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d {{(pid=63297) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2260.478278] env[63297]: DEBUG oslo_vmware.api [-] Task: {'id': task-1698559, 'name': CreateVM_Task, 'duration_secs': 0.284758} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2260.478624] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Created VM on the ESX host {{(pid=63297) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2260.479105] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2260.479277] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2260.479593] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2260.479837] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca264d08-a388-460a-b23c-7a0e36e9861d {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.483972] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Waiting for the task: (returnval){ [ 2260.483972] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e22faa-d5ed-b976-8e7e-9adb44257148" [ 2260.483972] env[63297]: _type = "Task" [ 2260.483972] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2260.492072] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e22faa-d5ed-b976-8e7e-9adb44257148, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.877194] env[63297]: DEBUG nova.network.neutron [req-dbf9f760-d595-4aff-9527-720848a7b439 req-4620bde6-4ab3-4f87-8c51-ccd445e2e2b0 service nova] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Updated VIF entry in instance network info cache for port 8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d. 
{{(pid=63297) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2260.877557] env[63297]: DEBUG nova.network.neutron [req-dbf9f760-d595-4aff-9527-720848a7b439 req-4620bde6-4ab3-4f87-8c51-ccd445e2e2b0 service nova] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Updating instance_info_cache with network_info: [{"id": "8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d", "address": "fa:16:3e:1a:62:40", "network": {"id": "9bdf7e1e-da1b-4556-a1cb-850451737586", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1112129375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "113bcd2c3af9493caddf933976a80add", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a8cae45-0e", "ovs_interfaceid": "8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2260.994268] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52e22faa-d5ed-b976-8e7e-9adb44257148, 'name': SearchDatastore_Task, 'duration_secs': 0.012414} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2260.994539] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2260.994776] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Processing image 41f1ad71-37f2-4e86-a900-da4965eba44f {{(pid=63297) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2260.995016] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2260.995171] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2260.995358] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2260.995642] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f1ec0c1-8c9c-44f2-9038-e87db278d59f {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.003117] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63297) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2261.003294] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63297) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2261.003983] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb5a3375-0f66-439b-892e-a07b81a6d7ad {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.008909] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Waiting for the task: (returnval){ [ 2261.008909] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bbff49-1531-37d0-67b2-609e7fb3e833" [ 2261.008909] env[63297]: _type = "Task" [ 2261.008909] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2261.016567] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bbff49-1531-37d0-67b2-609e7fb3e833, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2261.380642] env[63297]: DEBUG oslo_concurrency.lockutils [req-dbf9f760-d595-4aff-9527-720848a7b439 req-4620bde6-4ab3-4f87-8c51-ccd445e2e2b0 service nova] Releasing lock "refresh_cache-20007e28-079a-40a1-bd1f-eafd6a346dfa" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2261.519277] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]52bbff49-1531-37d0-67b2-609e7fb3e833, 'name': SearchDatastore_Task, 'duration_secs': 0.008188} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2261.520062] env[63297]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5cfc859-5486-4d30-acd2-ccce0f092218 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.524981] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Waiting for the task: (returnval){ [ 2261.524981] env[63297]: value = "session[525a50a1-1200-7e56-fd66-4d6251696a0b]527d1606-1ee8-0a61-b5f4-861b4ffea8b0" [ 2261.524981] env[63297]: _type = "Task" [ 2261.524981] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2261.531883] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527d1606-1ee8-0a61-b5f4-861b4ffea8b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2262.035640] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': session[525a50a1-1200-7e56-fd66-4d6251696a0b]527d1606-1ee8-0a61-b5f4-861b4ffea8b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009474} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2262.035898] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk" {{(pid=63297) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2262.036167] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 20007e28-079a-40a1-bd1f-eafd6a346dfa/20007e28-079a-40a1-bd1f-eafd6a346dfa.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2262.036422] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1625ad0b-7255-477a-9067-7c51601529a5 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.042755] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Waiting for the task: (returnval){ [ 2262.042755] env[63297]: value = "task-1698560" [ 2262.042755] env[63297]: _type = "Task" [ 2262.042755] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2262.050264] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698560, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2262.552626] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698560, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.433516} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2262.553035] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/41f1ad71-37f2-4e86-a900-da4965eba44f/41f1ad71-37f2-4e86-a900-da4965eba44f.vmdk to [datastore1] 20007e28-079a-40a1-bd1f-eafd6a346dfa/20007e28-079a-40a1-bd1f-eafd6a346dfa.vmdk {{(pid=63297) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2262.553035] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Extending root virtual disk to 1048576 {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2262.553279] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90b00d82-c1ee-43a7-9e82-edb242e8d296 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.559638] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Waiting for the task: (returnval){ [ 2262.559638] env[63297]: value = "task-1698561" [ 2262.559638] env[63297]: _type = "Task" [ 2262.559638] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2262.567390] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698561, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.068710] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698561, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059729} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2263.068971] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Extended root virtual disk {{(pid=63297) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2263.069730] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed43ced-e526-4948-a92b-6385900444f0 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.090941] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 20007e28-079a-40a1-bd1f-eafd6a346dfa/20007e28-079a-40a1-bd1f-eafd6a346dfa.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2263.091140] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f19b07f2-695b-4647-947e-6bb5bdafba3e {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.110635] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Waiting for the task: (returnval){ [ 2263.110635] env[63297]: value = "task-1698562" [ 2263.110635] env[63297]: _type = "Task" [ 2263.110635] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.117844] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698562, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.620738] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698562, 'name': ReconfigVM_Task, 'duration_secs': 0.266549} completed successfully. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2263.621186] env[63297]: DEBUG nova.virt.vmwareapi.volumeops [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 20007e28-079a-40a1-bd1f-eafd6a346dfa/20007e28-079a-40a1-bd1f-eafd6a346dfa.vmdk or device None with type sparse {{(pid=63297) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2263.621697] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4bb2be20-9dd5-459a-a388-0acd9b1b5770 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.628204] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Waiting for the task: (returnval){ [ 2263.628204] env[63297]: value = "task-1698563" [ 2263.628204] env[63297]: _type = "Task" [ 2263.628204] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.637053] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698563, 'name': Rename_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.138429] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698563, 'name': Rename_Task, 'duration_secs': 0.124157} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2264.138704] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Powering on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2264.138952] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48f905ab-e604-44f7-99b3-a0e4ff7fb0de {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.145041] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Waiting for the task: (returnval){ [ 2264.145041] env[63297]: value = "task-1698564" [ 2264.145041] env[63297]: _type = "Task" [ 2264.145041] env[63297]: } to complete. 
{{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.152062] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698564, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.655354] env[63297]: DEBUG oslo_vmware.api [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698564, 'name': PowerOnVM_Task, 'duration_secs': 0.40845} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2264.655726] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Powered on the VM {{(pid=63297) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2264.655859] env[63297]: INFO nova.compute.manager [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Took 6.62 seconds to spawn the instance on the hypervisor. [ 2264.655984] env[63297]: DEBUG nova.compute.manager [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Checking state {{(pid=63297) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2264.656749] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a6e67f-04b5-428e-904a-da43b142b580 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.173836] env[63297]: INFO nova.compute.manager [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Took 11.28 seconds to build instance. 
[ 2265.665132] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2265.665132] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2265.665132] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2265.665536] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63297) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2265.674888] env[63297]: DEBUG oslo_concurrency.lockutils [None req-61121f39-bb14-4389-b930-c77eed4573ff tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Lock "20007e28-079a-40a1-bd1f-eafd6a346dfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.794s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2266.325841] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Acquiring lock "20007e28-079a-40a1-bd1f-eafd6a346dfa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2266.326127] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Lock "20007e28-079a-40a1-bd1f-eafd6a346dfa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2266.326366] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Acquiring lock "20007e28-079a-40a1-bd1f-eafd6a346dfa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2266.326557] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Lock "20007e28-079a-40a1-bd1f-eafd6a346dfa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2266.326747] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Lock "20007e28-079a-40a1-bd1f-eafd6a346dfa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2266.328842] env[63297]: INFO nova.compute.manager [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Terminating instance [ 2266.330681] env[63297]: DEBUG nova.compute.manager [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Start destroying the instance on the hypervisor. {{(pid=63297) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2266.330870] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Destroying instance {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2266.331723] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbd2159-a453-4367-8d35-e0fff0b0eb61 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.339166] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Powering off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2266.339380] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dea1c361-c5b7-44ba-bd97-b2f3c68fc292 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.345847] env[63297]: DEBUG oslo_vmware.api [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Waiting for the task: (returnval){ [ 2266.345847] env[63297]: value = "task-1698565" [ 2266.345847] env[63297]: _type = "Task" [ 2266.345847] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2266.353226] env[63297]: DEBUG oslo_vmware.api [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698565, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.666242] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2266.855756] env[63297]: DEBUG oslo_vmware.api [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698565, 'name': PowerOffVM_Task, 'duration_secs': 0.190034} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2266.856042] env[63297]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Powered off the VM {{(pid=63297) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2266.856222] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Unregistering the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2266.856469] env[63297]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7584e5d6-3817-4cff-861b-085f08e2bb90 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.121929] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Unregistered the VM {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2267.122182] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Deleting contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2267.122369] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Deleting the datastore file [datastore1] 20007e28-079a-40a1-bd1f-eafd6a346dfa {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2267.122728] env[63297]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a258e6c2-e275-4ff8-b0c4-d422686b28f9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.128865] env[63297]: DEBUG oslo_vmware.api [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Waiting for the task: (returnval){ [ 2267.128865] env[63297]: value = 
"task-1698567" [ 2267.128865] env[63297]: _type = "Task" [ 2267.128865] env[63297]: } to complete. {{(pid=63297) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.136257] env[63297]: DEBUG oslo_vmware.api [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698567, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.638818] env[63297]: DEBUG oslo_vmware.api [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Task: {'id': task-1698567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129677} completed successfully. {{(pid=63297) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.639203] env[63297]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Deleted the datastore file {{(pid=63297) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2267.639405] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Deleted contents of the VM from datastore datastore1 {{(pid=63297) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2267.639627] env[63297]: DEBUG nova.virt.vmwareapi.vmops [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Instance destroyed {{(pid=63297) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2267.639846] env[63297]: INFO nova.compute.manager [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Took 1.31 seconds to destroy the instance on the hypervisor. [ 2267.640134] env[63297]: DEBUG oslo.service.loopingcall [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63297) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2267.640366] env[63297]: DEBUG nova.compute.manager [-] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Deallocating network for instance {{(pid=63297) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2267.640481] env[63297]: DEBUG nova.network.neutron [-] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] deallocate_for_instance() {{(pid=63297) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2267.664812] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2268.034074] env[63297]: DEBUG nova.compute.manager [req-7635e49e-88f4-47ca-8073-68e3ffbec696 req-5c0449ab-f26d-43c6-9424-a47c9e044782 service nova] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Received event network-vif-deleted-8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d {{(pid=63297) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2268.034325] env[63297]: INFO nova.compute.manager [req-7635e49e-88f4-47ca-8073-68e3ffbec696 req-5c0449ab-f26d-43c6-9424-a47c9e044782 service nova] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Neutron deleted interface 8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d; detaching it from the instance and deleting it from the info cache [ 2268.034458] env[63297]: DEBUG nova.network.neutron [req-7635e49e-88f4-47ca-8073-68e3ffbec696 req-5c0449ab-f26d-43c6-9424-a47c9e044782 service nova] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2268.454628] env[63297]: DEBUG nova.network.neutron [-] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Updating instance_info_cache with network_info: [] {{(pid=63297) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2268.537301] env[63297]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b758e1a7-bcfc-4feb-a486-dfff3c776092 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.547169] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7ca9ef-59db-4c3d-96ef-b175f1446d68 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.569854] env[63297]: DEBUG nova.compute.manager [req-7635e49e-88f4-47ca-8073-68e3ffbec696 req-5c0449ab-f26d-43c6-9424-a47c9e044782 service nova] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Detach interface failed, port_id=8a8cae45-0efa-4b0e-9cb1-cfd3880bb42d, reason: Instance 20007e28-079a-40a1-bd1f-eafd6a346dfa could not be found. 
{{(pid=63297) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2268.665127] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2268.957619] env[63297]: INFO nova.compute.manager [-] [instance: 20007e28-079a-40a1-bd1f-eafd6a346dfa] Took 1.32 seconds to deallocate network for instance. [ 2269.463695] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2269.464059] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2269.464261] env[63297]: DEBUG nova.objects.instance [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Lazy-loading 'resources' on Instance uuid 20007e28-079a-40a1-bd1f-eafd6a346dfa {{(pid=63297) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2269.660670] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2269.665298] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager.update_available_resource {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2270.022189] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67bbe85f-9d39-4eb7-988f-0e8879295440 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.029526] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241f4108-0745-4213-814e-f651aed61a15 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.058568] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45b619c-23aa-4d7a-84fa-265cc8609d86 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.065452] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20727a5e-b0b4-418e-9f23-b3bc8ed73f3b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.079961] env[63297]: DEBUG nova.compute.provider_tree [None 
req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2270.169175] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.599786] env[63297]: ERROR nova.scheduler.client.report [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] [req-0083b5de-0758-4a0c-8d24-3052032ee026] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 88960333-a089-4255-ad72-5c02d57b2b35. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0083b5de-0758-4a0c-8d24-3052032ee026"}]} [ 2270.618388] env[63297]: DEBUG nova.scheduler.client.report [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Refreshing inventories for resource provider 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2270.633675] env[63297]: DEBUG nova.scheduler.client.report [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Updating ProviderTree inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2270.633904] env[63297]: DEBUG nova.compute.provider_tree [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 
48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2270.644375] env[63297]: DEBUG nova.scheduler.client.report [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Refreshing aggregate associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, aggregates: None {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2270.667759] env[63297]: DEBUG nova.scheduler.client.report [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Refreshing trait associations for resource provider 88960333-a089-4255-ad72-5c02d57b2b35, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63297) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2270.689715] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e928dd-958b-4a82-8874-4f267a134de2 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.696893] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99daa5f-7ffb-4bcf-949c-943ec1c06908 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.725845] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc1ba2f-f550-46df-8a30-b7ea8346c847 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.732395] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5376986c-0f08-4065-a68e-3f4ad2441ba9 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.744905] env[63297]: DEBUG nova.compute.provider_tree [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2271.274696] env[63297]: DEBUG nova.scheduler.client.report [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Updated inventory for provider 88960333-a089-4255-ad72-5c02d57b2b35 with generation 193 in 
Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2271.274989] env[63297]: DEBUG nova.compute.provider_tree [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Updating resource provider 88960333-a089-4255-ad72-5c02d57b2b35 generation from 193 to 194 during operation: update_inventory {{(pid=63297) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2271.275192] env[63297]: DEBUG nova.compute.provider_tree [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Updating inventory in ProviderTree for provider 88960333-a089-4255-ad72-5c02d57b2b35 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2271.779673] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.316s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2271.782366] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.613s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2271.782550] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2271.782703] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63297) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2271.783640] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071509b2-84c6-4ffa-94c4-94ee3d9cf22b {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.791535] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f8ca906b-26ba-4576-82c6-e4fa20356506 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.806796] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87081745-aaba-4b98-8277-7fe02f8f5357 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.812933] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2489fa0-d39e-4604-bc89-4ca3c0623cde {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.841245] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181212MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63297) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2271.841371] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2271.841558] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2271.844429] env[63297]: INFO nova.scheduler.client.report [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Deleted allocations for instance 20007e28-079a-40a1-bd1f-eafd6a346dfa [ 2272.351867] env[63297]: DEBUG oslo_concurrency.lockutils [None req-cf763ec3-f13f-4d61-a9ae-b8bb9d2b558b tempest-ServerMetadataNegativeTestJSON-1781649476 tempest-ServerMetadataNegativeTestJSON-1781649476-project-member] Lock "20007e28-079a-40a1-bd1f-eafd6a346dfa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.025s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2272.862052] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2272.862306] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63297) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2272.874782] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5847b87-def1-42fb-ad4a-1f0e2b7dde45 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.882450] env[63297]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f2ebea-e2f3-42f9-8f85-74ac193b6a11 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.913299] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc9269c-8a55-4567-86f9-e83791e98e96 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.921081] env[63297]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536f1ede-ab98-4cde-8653-7e693fed8017 {{(pid=63297) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.935735] env[63297]: DEBUG nova.compute.provider_tree [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed in ProviderTree for provider: 88960333-a089-4255-ad72-5c02d57b2b35 {{(pid=63297) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2273.439654] env[63297]: DEBUG nova.scheduler.client.report [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Inventory has not changed for provider 88960333-a089-4255-ad72-5c02d57b2b35 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63297) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2273.945511] env[63297]: DEBUG nova.compute.resource_tracker [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63297) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2273.945831] env[63297]: DEBUG oslo_concurrency.lockutils [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.104s {{(pid=63297) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2276.946209] env[63297]: DEBUG oslo_service.periodic_task [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63297) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2276.946569] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Starting heal instance info cache {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2276.946569] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Rebuilding the list of instances to heal {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2277.449644] env[63297]: DEBUG nova.compute.manager [None req-7c8673ae-4f7a-417e-903c-e043cfe3cffe None None] Didn't find any instances for network info cache update. {{(pid=63297) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}}
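Earlier in this section the report client's inventory PUT failed with a 409 "placement.concurrent_update" and then succeeded after a refresh, bumping the provider generation from 193 to 194. That is Placement's optimistic concurrency control: every inventory write carries the resource-provider generation the writer last read, and a stale generation is rejected so the caller re-reads and retries. A minimal sketch of that retry loop against the Placement HTTP API, assuming a placeholder endpoint and token (the real report client goes through keystoneauth sessions, not raw requests):

    # Sketch of the generation-conflict retry seen in the log above.
    # PLACEMENT_URL and the token are placeholders.
    import requests

    PLACEMENT_URL = 'http://placement.example.test/placement'      # placeholder
    HEADERS = {'X-Auth-Token': 'TOKEN',                             # placeholder
               'OpenStack-API-Version': 'placement 1.26'}

    def set_inventories(rp_uuid, inventories, retries=3):
        for _ in range(retries):
            # Read the provider's current generation.
            rp = requests.get(f'{PLACEMENT_URL}/resource_providers/{rp_uuid}',
                              headers=HEADERS).json()
            body = {'resource_provider_generation': rp['generation'],
                    'inventories': inventories}
            resp = requests.put(
                f'{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories',
                json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 "placement.concurrent_update": another writer (here, the
            # resource tracker's periodic update) bumped the generation first,
            # so refresh and try again, as the report client does above.
        raise RuntimeError('still conflicting after %d attempts' % retries)

    inventory = {'VCPU': {'total': 48, 'allocation_ratio': 4.0, 'max_unit': 16}}
    # set_inventories('88960333-a089-4255-ad72-5c02d57b2b35', inventory)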